Code example #1
File: test_function.py  Project: iaciac/networkx
 def test_create_empty_copy(self):
     G = nx.create_empty_copy(self.G, with_data=False)
     assert_nodes_equal(G, list(self.G))
     assert_equal(G.graph, {})
     assert_equal(G._node, {}.fromkeys(self.G.nodes(), {}))
     assert_equal(G._adj, {}.fromkeys(self.G.nodes(), {}))
     G = nx.create_empty_copy(self.G)
     assert_nodes_equal(G, list(self.G))
     assert_equal(G.graph, self.G.graph)
     assert_equal(G._node, self.G._node)
     assert_equal(G._adj, {}.fromkeys(self.G.nodes(), {}))
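For reference, a minimal sketch (assuming NetworkX 2.x) of what `create_empty_copy` does: the nodes and, optionally, their data survive, while the edges never do.

import networkx as nx

G = nx.path_graph(3)                            # 0-1-2
G.nodes[0]["color"] = "red"
H = nx.create_empty_copy(G)                     # keep nodes and their data, drop all edges
assert list(H.nodes) == [0, 1, 2]
assert H.number_of_edges() == 0
assert H.nodes[0]["color"] == "red"
H2 = nx.create_empty_copy(G, with_data=False)   # drop node/graph data as well
assert "color" not in H2.nodes[0]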
Code example #2
File: test_function.py  Project: Bludge0n/AREsoft
 def test_create_empty_copy(self):
     G = networkx.create_empty_copy(self.G, with_nodes=False)
     assert_equal(G.nodes(), [])
     assert_equal(G.graph, {})
     assert_equal(G.node, {})
     assert_equal(G.edge, {})
     G = networkx.create_empty_copy(self.G)
     assert_equal(G.nodes(), self.G.nodes())
     assert_equal(G.graph, {})
     assert_equal(G.node, {}.fromkeys(self.G.nodes(), {}))
     assert_equal(G.edge, {}.fromkeys(self.G.nodes(), {}))
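This test targets the NetworkX 1.x API (`with_nodes=`, `G.node`, `G.edge`). A hedged translation to 2.x, where `with_data` replaced `with_nodes` and nodes are always kept:

import networkx as nx

G_src = nx.path_graph(3)
G = nx.create_empty_copy(G_src, with_data=False)  # 2.x has no with_nodes
assert list(G.nodes) == [0, 1, 2]                 # nodes always survive in 2.x
assert G.graph == {} and G.number_of_edges() == 0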
Code example #3
    def allocate_role(self):
        # Work on an edgeless copy of the graph, then re-add the edges,
        # so nodes can be removed without touching self._graph
        graph = nx.create_empty_copy(self._graph, with_nodes=True)
        graph.add_edges_from(self._graph.edges())

        while len(graph.nodes()) > 0:
            # Try to get the nodes with degree 1
            nodes = self._get_nodes_with_degree_one(graph)
            if len(nodes) > 0:
                for node in nodes:
                    if node not in graph:
                        # Already removed as a neighbor of an earlier leaf
                        continue
                    # Mark as PE
                    self._graph.node[node]['vrf_role'] = 'PE'
                    # Mark each neighbor as P
                    neighbors = list(graph.neighbors(node))
                    for neighbor in neighbors:
                        self._graph.node[neighbor]['vrf_role'] = 'P'
                    # Remove the leaf and its neighbors from the working copy
                    graph.remove_node(node)
                    graph.remove_nodes_from(neighbors)
            else:
                node_with_max_degree = self._get_max_degree_node(graph)
                if node_with_max_degree is not None:
                    self._graph.node[node_with_max_degree]['vrf_role'] = 'P'
                    graph.remove_node(node_with_max_degree)
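A minimal, self-contained sketch of the same degree-1 peeling idea (hypothetical function and role names, NetworkX 2.x API):

import networkx as nx

def allocate_roles(g):
    """Peel degree-1 nodes: mark them 'PE' and their neighbors 'P'."""
    roles = {}
    work = nx.create_empty_copy(g)            # nodes only
    work.add_edges_from(g.edges())
    while work.number_of_nodes() > 0:
        leaves = [v for v in work if work.degree(v) == 1]
        if leaves:
            for leaf in leaves:
                if leaf not in work:          # removed as an earlier leaf's neighbor
                    continue
                roles[leaf] = 'PE'
                nbrs = list(work.neighbors(leaf))
                for nb in nbrs:
                    roles[nb] = 'P'
                work.remove_node(leaf)
                work.remove_nodes_from(nbrs)
        else:
            # no leaves left: peel a maximum-degree node as 'P'
            node = max(work, key=work.degree)
            roles[node] = 'P'
            work.remove_node(node)
    return roles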
Code example #4
File: Prim.py  Project: andrewalker/grafos
# Assumed imports from the original module: networkx as nx, numpy as n
import networkx as nx
import numpy as n

def Prim(G=nx.Graph(), R=None):
    # Q is the set of vertices not yet in the tree
    Q    = {}
    # pred will store the predecessor of each vertex
    pred = {}

    # Initialize Q with every vertex at infinity, since at this point no
    # vertex is connected yet. Likewise no vertex has a predecessor, so we
    # use the value 'null'.
    for v, data in G.nodes(data=True):
        Q[v]    = n.inf
        pred[v] = 'null'

    # If an edge has no weight defined, assign it the value 1.0. This is an
    # alternative to the approach used in Kruskal, where a flag variable
    # tracks whether weights are being taken into account.
    for e, x in G.edges():
        if 'weight' not in G[e][x]:
            G[e][x]['weight'] = 1.0

    # Initialize the root of the tree with value 0, and create a tree named
    # MST containing only the vertices of G.
    Q[R] = 0.0
    MST  = nx.create_empty_copy(G)

    while Q:
        # u := index of the smallest element of Q,
        # since we want the vertex with the smallest weight
        u = min(Q, key=Q.get)

        # remove it from Q, as it will be added to the tree
        del Q[u]

        # store in Q the minimum weight of each neighbor of u, whenever it
        # is smaller than the one already stored
        for vizinho in G[u]:
            if vizinho in Q:
                if G[u][vizinho]['weight'] < Q[vizinho]:
                    pred[vizinho] = u
                    Q[vizinho]    = G[u][vizinho]['weight']

        # If u has a predecessor, add the edge connecting u to it
        if pred[u] != 'null':
            for v1, v2, data in G.edges(data=True):
                # To preserve the edge data we loop over every edge of the
                # graph looking for the edge (pred(u), u). Since an
                # undirected graph in this library does not duplicate its
                # edges, i.e. if it has (u, v) it does not have (v, u), for
                # undirected graphs we must also check for the edge
                # (u, pred(u)) instead of (pred(u), u).
                if v1 == pred[u] and v2 == u:
                    MST.add_edge(pred[u], u, **data)
                elif (v1 == u and v2 == pred[u]) and not nx.is_directed(G):
                    MST.add_edge(pred[u], u, **data)

    return MST
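Hypothetical usage of the Prim function above on a small weighted graph:

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 2.0), (0, 2, 4.0)])
T = Prim(G, R=0)
print(sorted(T.edges()))   # expected: [(0, 1), (1, 2)]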
Code example #5
File: max_flow.py  Project: Patrick-Payne/snippets
def max_flow(graph, source, sink, attribute='capacity'):
    """Return the maximum flow through a flow network.

    Uses the Edmonds-Karp algorithm.
    Parameters:
    graph -- an nx.DiGraph object representing the flow network. Antiparallel
        edges are supported, but other constraints on flow networks must hold.
    source -- The source node on the graph.
    sink -- The sink node on the graph.
    attribute -- the edge attribute containing the capacities.

    Returns: An nx.DiGraph representing the flow.
    """
    # Eliminate any antiparallel edges.
    simplified_graph = graph.copy()
    fake_nodes = remove_antiparallel(simplified_graph)

    # Create the empty flow.
    curr_flow = nx.create_empty_copy(simplified_graph)

    # Keep augmenting the flow with paths from the residual network until
    # no more augmenting paths exist.
    can_augment = True
    while can_augment:
        can_augment = augment(simplified_graph, curr_flow,
                              source, sink, attribute)

    # Restore antiparallel edges.
    restore_antiparallel(curr_flow, fake_nodes)

    return curr_flow
Code example #6
File: max_flow.py  Project: Patrick-Payne/snippets
def residual_flow(network, flow, attribute='capacity'):
    """Return the residual flow through a flow network.

    Parameters:
    network -- The flow network, represented using an nx.DiGraph object.
    flow -- The flow through the network, also represented by an nx.DiGraph.
    attribute -- The name of the edge attribute containing the capacity.

    Returns: An nx.DiGraph representing the residual flow.
    """
    residual = nx.create_empty_copy(network)
    for u, v in network.edges_iter():  # NetworkX 1.x API; use .edges() in 2.x
        # Get the edge attributes from each graph
        capacity = network[u][v][attribute]
        used = flow[u][v][attribute] if (u, v) in flow.edges() else 0
        excess = capacity - used
        assert excess >= 0, "Flow had edges greater than capacity."

        # Add in any leftover forward capacity.
        if excess > 0:
            residual.add_edge(u, v, {attribute: excess})
        # Add in the decreasing flow residual
        if used > 0:
            residual.add_edge(v, u, {attribute: used})

    return residual
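For comparison, NetworkX ships its own max-flow solvers; a minimal sketch with the 2.x API:

import networkx as nx

G = nx.DiGraph()
G.add_edge('s', 'a', capacity=3)
G.add_edge('a', 't', capacity=2)
flow_value, flow_dict = nx.maximum_flow(G, 's', 't')
print(flow_value)   # 2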
Code example #7
File: graph_operation.py  Project: iwankoTG/graph
nx.draw(H, with_labels=True)
fig.text(0.05, 0.90, "compose", fontweight='bold')
plt.show()

#complement
H = nx.complement(Gb)
plt.subplot(1, 2, 1)
plt.title("Gb:ladder graph", fontweight='bold')
nx.draw(Gb, with_labels=True)
plt.subplot(1, 2, 2)
plt.title("complement", fontweight='bold')
nx.draw(H, with_labels=True)
plt.show()

#create empty copy
H = nx.create_empty_copy(Gb)
plt.subplot(1, 2, 1)
plt.title("Gb:ladder graph", fontweight='bold')
nx.draw(Gb, with_labels=True)
plt.subplot(1, 2, 2)
plt.title("create empty copy", fontweight='bold')
nx.draw(H, with_labels=True)
plt.show()

#to directed
H = nx.to_directed(Ga)
plt.subplot(1, 2, 1)
plt.title("Ga:undirected graph", fontweight='bold')
nx.draw(Ga, with_labels=True)
plt.subplot(1, 2, 2)
plt.title("directed graph", fontweight='bold')
Code example #8
File: network_analysis.py  Project: Jung-hye-in/-.csv
import network_utilities as nw
import networkx
import random, os, numpy, copy
from matplotlib import pyplot as plt

A_network = networkx.Graph()
A_genelist = []
count = 1
while (count < len(A_preprocessing)):
    i = A_preprocessing[0][count]
    A_genelist.append(i)
    count = count + 1
A_network.add_nodes_from(A_genelist)
n_node = A_network.number_of_nodes()
n_edge = A_network.number_of_edges()
new_graph = networkx.create_empty_copy(A_network)
nodes = list(A_network.nodes())

random_nodes = list(A_network.nodes())
random.shuffle(random_nodes)
new_graph.add_edges_from([(nodes[i], random_nodes[i])
                          for i in range(len(nodes))])


# number of edges
def get_number_of_distinct_edges(G):
    edges_list = G.edges()
    edge_set = set()
    for id1, id2 in edges_list:
        edge_set.add((id1, id2))
    return len(edge_set)
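A hedged note on the rewiring above: pairing `nodes[i]` with a shuffled copy can pair a node with itself and create self-loops; a minimal filtered variant:

new_graph = networkx.create_empty_copy(A_network)
new_graph.add_edges_from((u, v) for u, v in zip(nodes, random_nodes) if u != v)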
Code example #9
File: SSMO_MH.py  Project: lee-jingu/SSMOECHS
def Optimizer(network,
              Alive_Node,
              Update=False,
              R=30,
              In_Median=30,
              First=False):
    NET_MAX = 0
    SSMO_NET = nx.create_empty_copy(network)
    SSMO_CHID = []
    NB_Cluster = max(round(cf.P_CH * len(Alive_Node)), 1)
    update = 0
    if Update == True:
        Rmax = 0
        for i in Alive_Node:
            R_tmp = math.sqrt((SSMO_NET.node[i]['RTBS']**2) / NB_Cluster)
            if R_tmp > Rmax:
                Rmax = R_tmp

            if Rmax != R:
                R = Rmax
                update = 1

    if update == 1:
        INNER = []
        for i in Alive_Node:
            if SSMO_NET.node[i]['RTBS'] < R:
                INNER.append(i)
            SSMO_NET.node[i]['Cover'] = []
            for j in Alive_Node:
                if i == j:
                    continue
                x1, y1 = SSMO_NET.node[i]['pos']
                x2, y2 = SSMO_NET.node[j]['pos']
                D = math.sqrt((x1 - x2)**2 + (y1 - y2)**2)
                if D < R:
                    SSMO_NET.node[i]['Cover'].append(j)
        In_Median = np.median(INNER)
        if len(INNER) == 0:
            In_Median = 0

    ## Initializing Phase
    SM_Arr = []
    MG = 5
    MIR = 100
    Swarm_Size = 40
    FIT = []

    MGLL = 20
    MLLL = 8
    Group0 = []
    Group1 = []
    Group2 = []
    Group3 = []

    for i in range(0, Swarm_Size):
        choice = np.random.choice(Alive_Node, NB_Cluster, replace=False)
        SM_Arr.append(choice)
        Group0.append(i)
        FIT.append(Get_Fitness(SSMO_NET, choice, Alive_Node))

    Group = 1
    GLID = np.where(FIT == np.max(FIT))[0][0]
    LLID_ARR = np.zeros(MG, dtype=np.int32)
    LLID_ARR[0] = GLID
    Pr = 0.1
    GLL = 0
    for Iter in range(0, MIR):
        LLL = 0

        ## Local Leader Phase
        Pr += (0.4 - 0.1) / MIR
        for i in range(0, Group):
            if i == 0:
                temp = Group0
            if i == 1:
                temp = Group1
            if i == 2:
                temp = Group2
            if i == 3:
                temp = Group3

            LLID = LLID_ARR[i]
            LLMAX = FIT[LLID]
            LMAX = FIT[LLID]
            MAXFIT = FIT[LLID]

            for j in temp:
                if j == LLID or j == GLID:
                    continue

                if random() < Pr:
                    Prob_Arr = []
                    LL = SM_Arr[LLID]
                    SM = SM_Arr[j]
                    Rand = np.random.choice(temp, 1)[0]
                    SMR = SM_Arr[Rand]
                    ARANGE = np.hstack([SM, LL, SMR])
                    b = uniform(0, 1)
                    d = uniform(-1, 1)
                    PROBSM = np.ones(NB_Cluster) * (1 - b - d)
                    PROBLL = np.ones(NB_Cluster) * (b)
                    PROBSMR = np.ones(NB_Cluster) * (d)
                    Prob_Arr = np.hstack([PROBSM, PROBLL, PROBSMR])
                    Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                    choice = np.random.choice(ARANGE,
                                              NB_Cluster,
                                              replace=False,
                                              p=Prob_Arr / np.sum(Prob_Arr))
                    SM_Arr[j] = choice
                    FIT[j] = Get_Fitness(SSMO_NET, choice, Alive_Node)
                    if LMAX < FIT[j]:
                        LMAX = FIT[j]
                        LLID_ARR[i] = j
            if LLMAX == LMAX:
                LLL += 1

        ## Global Leader Phase
        GLID = np.where(FIT == np.max(FIT))[0][0]
        for i in range(0, Swarm_Size - 1):

            GGLMAX = FIT[GLID]
            GLMAX = FIT[GLID]
            if i == GLID:
                continue
            Prob = 0.9 * (FIT[i] / FIT[GLID]) + 0.1
            if Prob > random():
                GL = SM_Arr[GLID]
                SM = SM_Arr[i]
                Rand = np.random.choice(Group0, 1)[0]
                SMR = SM_Arr[Rand]
                ARANGE = np.hstack([SM, GL, SMR])
                b = uniform(0, 1)
                d = uniform(-1, 1)
                PROBSM = np.ones(NB_Cluster) * (1 - b - d)
                PROBGL = np.ones(NB_Cluster) * (b)
                PROBSMR = np.ones(NB_Cluster) * (d)
                Prob_Arr = np.hstack([PROBSM, PROBGL, PROBSMR])
                Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                choice = np.random.choice(ARANGE,
                                          NB_Cluster,
                                          replace=False,
                                          p=Prob_Arr / np.sum(Prob_Arr))
                SM_Arr[i] = choice
                FIT[i] = Get_Fitness(SSMO_NET, choice, Alive_Node)
                if FIT[i] > GLMAX:
                    GLMAX = FIT[i]
                    GLID = i
        if GLMAX == GGLMAX:
            GLL += 1

        ## Local Decision Phase
        # if LLL == MLLL:

        ## Global Decision Phase
        if GLL == MGLL:
            GLL = 0
            Group += 1
            Choice_Node = np.arange(0, Swarm_Size, 1)
            if Group == 2:
                Group0 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.array(Choice_Node)
            if Group == 3:
                Group0 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.array(Choice_Node)
            if Group == 4:
                Group0 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group2))
                Group3 = np.array(Choice_Node)
            if Group == 5:
                SSMO_CHID = SM_Arr[GLID]

    SSMO_CHID = SM_Arr[GLID]
    INNER = []
    OUTER = []
    RTBS = []
    for i in SSMO_CHID:
        RTBS.append(SSMO_NET.node[i]['RTBS'])
    CENTER = np.median(RTBS)
    for i in Alive_Node:
        if i in SSMO_CHID:
            if network.node[i]['RTBS'] > CENTER:
                OUTER.append(i)
                continue
            else:
                INNER.append(i)
                SSMO_NET.node[i]['Next'] = 0
                continue
        x1, y1 = SSMO_NET.node[i]['pos']
        NNID = 0
        NN_Dist = 1000
        for NN in SSMO_CHID:
            x2, y2 = SSMO_NET.node[NN]['pos']
            new_dist = math.sqrt((x1 - x2)**2 + (y1 - y2)**2)
            if new_dist < NN_Dist:
                NNID = NN
                NN_Dist = new_dist
        SSMO_NET.node[i]['Next'] = NNID

    for i in OUTER:
        NNID = 0
        NN = SSMO_NET.node[i]['RTBS']
        x, y = SSMO_NET.node[i]['pos']
        for j in INNER:
            x2, y2 = SSMO_NET.node[j]['pos']
            Dist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if Dist < NN:
                NNID = j
                NN = Dist
        SSMO_NET.node[i]['Next'] = NNID

    if First == True:
        ## add_Edge
        for i in Alive_Node:
            SSMO_NET.add_edge(i, SSMO_NET.node[i]['Next'])

    return SSMO_NET, SSMO_CHID, R, In_Median
Code example #10
def MST(G=nx.Graph()):
    i = 0  # counter for the step-by-step MST generation
    Q = []  # adjacency matrix holding the graph's weights

    # create an empty copy (vertices only) of the given graph
    MST = nx.create_empty_copy(G)

    # initialize the matrix with infinite values
    for k in range(0, nx.number_of_nodes(G)):
        linha = []
        for l in range(0, nx.number_of_nodes(G)):
            linha.append(n.inf)
        Q.append(linha)

    # insert the edge weights into the matrix
    for v1, v2, data in G.edges(data=True):
        Q[int(v1)].pop(int(v2))
        Q[int(v1)].insert(int(v2), data['weight'])

    # detect cycles and remove them
    for v3 in G.nodes():
        for v1 in G.nodes():
            for v2 in G.nodes():
                if Q[int(v1)][int(v3)] != n.inf and Q[int(v1)][int(v2)] != n.inf \
                        and Q[int(v3)][int(v2)] != n.inf:
                    if Q[int(v1)][int(v3)] + Q[int(v3)][int(v2)] <= Q[int(v1)][int(v2)]:
                        Q[int(v1)].pop(int(v2))
                        Q[int(v1)].insert(int(v2), n.inf)
                    elif Q[int(v1)][int(v3)] >= Q[int(v3)][int(v2)]:
                        Q[int(v2)].pop(int(v3))
                        Q[int(v2)].insert(int(v3), n.inf)
                    elif Q[int(v1)][int(v3)] <= Q[int(v3)][int(v2)]:
                        Q[int(v3)].pop(int(v2))
                        Q[int(v3)].insert(int(v2), n.inf)

    # sweep the matrix and add to the MST the safe, minimum-weight edges
    for k in range(0, nx.number_of_nodes(G) - 1):
        # initialize the variables with impossible values
        menor_peso = n.inf
        indice_menor = -1
        indice = -1
        # scan each row looking for the smallest value
        for j in range(0, nx.number_of_nodes(G)):
            aux = buscarMenor(Q[j])  # candidate smallest weight
            # if the value found is the smallest so far, keep it
            if menor_peso > aux and aux != n.inf:
                menor_peso = aux  # store the smallest weight
                indice = Q[j].index(menor_peso)  # column of the smallest weight
                indice_menor = j  # row of the smallest weight
        # add the minimum-weight edge to the graph
        MST.add_edge(indice_menor, indice, weight=menor_peso)
        H[i] = MST.copy()  # snapshot step i into the list of graphs
        i = i + 1  # increment i
        # clear the weight that was just added from the matrix
        Q[indice_menor].pop(indice)
        Q[indice_menor].insert(indice, -1)
        # clear the symmetric entry too (the graph is not a digraph, so this
        # check is necessary)
        Q[indice].pop(indice_menor)
        Q[indice].insert(indice_menor, -1)
        # sweep every row of the matrix
        for k in range(0, nx.number_of_nodes(G)):
            # before marking the vertex as visited, propagate its adjacency
            # values to the vertex it shares an edge with, but only when the
            # weight is smaller
            if Q[indice_menor][k] != -1 and Q[indice_menor][k] < Q[k][indice_menor]:
                Q[k].pop(indice_menor)
                Q[k].insert(indice_menor, Q[indice_menor][k])
            # mark the node as "visited" by writing -1 across its row in the
            # adjacency matrix
            Q[indice_menor].pop(k)
            Q[indice_menor].insert(k, -1)
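The snippet assumes module-level `import networkx as nx`, `import numpy as n`, a global `H` holding per-step snapshots, and a `buscarMenor` helper. A hypothetical reconstruction of that helper (smallest entry in a row, skipping visited -1 slots) plus an invocation sketch:

import networkx as nx
import numpy as n

def buscarMenor(linha):
    # assumed helper: smallest weight in a row, ignoring visited (-1) entries
    candidatos = [p for p in linha if p != -1]
    return min(candidatos) if candidatos else n.inf

H = {}  # per-step snapshots of the growing MST
G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 2.0), (0, 2, 4.0)])
T = MST(G)
print(sorted(T.edges(data='weight')))   # expected: [(0, 1, 1.0), (1, 2, 2.0)]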
Code example #11
File: plot.py  Project: dm4sec/BigMAC
def plot(G, name, prune=False, debug=False, focus_set=set(), edge_limit=None):
    import networkx as nx

    # NetworkX has a relationship with pygraphviz's AGraph
    # This is a wrapper around graphviz (binary/library)
    # The python graphviz library is separate
    import pygraphviz

    remove_edges = False

    nx.set_node_attributes(G, 'filled,solid', 'style')

    if prune:
        while True:
            to_remove = []
            for n in G.nodes():
                if n.startswith("process") or n.startswith("subject"):
                    continue

                ie = set(map(lambda x: x[0], list(G.in_edges(n))))
                oe = set(map(lambda x: x[1], list(G.out_edges(n))))
                total = len(ie | oe)

                if total <= 1:
                    to_remove += [n]

            if len(to_remove) == 0:
                break

            list(map(G.remove_node, to_remove))

    if len(focus_set):
        to_keep = []

        for center_node in sorted(list(focus_set)):
            node_focus = set([center_node])
            node_focus |= set(
                map(lambda x: x[0], list(G.in_edges(center_node))))
            node_focus |= set(
                map(lambda x: x[1], list(G.out_edges(center_node))))

            for node in list(node_focus):
                if node != center_node and (node.startswith("process")
                                            or node.startswith("subject")):
                    node_focus |= set(
                        map(lambda x: x[1], list(G.out_edges(node))))

            to_keep += [node_focus]

        from functools import reduce
        if len(to_keep) == 2:
            nodes_to_keep = (to_keep[0] & to_keep[1]) | focus_set
        else:
            nodes_to_keep = reduce(lambda x, y: x | y, to_keep)

        G = G.subgraph(list(nodes_to_keep))

    if edge_limit is not None and len(G.edges()) >= edge_limit:
        remove_edges = True

    if remove_edges:
        AG = nx.nx_agraph.to_agraph(nx.create_empty_copy(G))
    else:
        AG = nx.nx_agraph.to_agraph(G)

    if debug:
        from IPython import embed
        embed()

    # The SFDP program is extremely good at large graphs
    AG.layout(prog='sfdp')

    AG.draw(name,
            prog="sfdp",
            format='svg',
            args='-Gsmoothing=rng -Goverlap=prism2000 -Goutputorder=edgesfirst -Gsep=+2')
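A minimal sketch of the edge-limit fallback used above, i.e. laying out only the nodes when the graph is too dense (assumes pygraphviz is installed):

import networkx as nx

G = nx.complete_graph(50)                              # 1225 edges
AG = nx.nx_agraph.to_agraph(nx.create_empty_copy(G))   # nodes only, no edges
AG.layout(prog='sfdp')
AG.draw('nodes_only.svg', format='svg')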
Code example #12
def reduce_graph(G, M, N, draw=True):
    ''' Reduce G to an M-node, data-server-only graph. '''
    pos = nx.get_node_attributes(G, 'pos')
    ctr = find_center_node(G)[0]
    G.nodes[ctr]['wrk'] = 'd-ctr'

    # realize a logic to reduce the network based on find MST
    G = nx.minimum_spanning_tree(G)
    mst_ctr = find_center_node(G)[0]
    G.nodes[mst_ctr]['wrk'] = 's-ctr'

    all_nodes_list = list(G.nodes.data('wrk'))
    all_data_nodes = list()  # Get all the red nodes.
    for node in all_nodes_list:
        if node[1] == 's':
            all_data_nodes.append(node)
    # Since we already computed ctr (yellow) above, we can add that to the list
    all_data_nodes.append((ctr, 'd-ctr'))
    all_data_nodes_length = len(all_data_nodes)

    # NOTE, we might be able to use just this: multi_source_dijkstra_path(G, sources) Find shortest weighted paths in G from a given set of source nodes.
    # NOTE this might not work because its returning to all nodes. WE only care about red nodes.
    # all_shortest_paths = multi_source_dijkstra_path_length

    # Out of all the methods, Dijkstra's path seems to be the best fit.

    # Once we connect i to j, we don't need to connect j to i (it's already there)
    all_shortest_paths = list()
    for i in range(0, all_data_nodes_length):
        for j in range(i, all_data_nodes_length):
            shortest_path_i_j = nx.dijkstra_path(G, all_data_nodes[i][0],
                                                 all_data_nodes[j][0])
            all_shortest_paths.append(shortest_path_i_j)

    nodes_in_new_graph = set()
    new_graph = nx.Graph()
    for path_of_nodes in all_shortest_paths:
        for node in path_of_nodes:
            # NOTE -- might not need this code anymore, but keeping it here just in case.
            # Insert code for any other centers added.
            new_graph.add_node(node, wrk=G.nodes[node]['wrk'])
            nodes_in_new_graph.add(node)
            # print(node)
            # print(G.nodes[node]['wrk'])

    #Generate all edge pairs between red + shortest paths.
    # remove all edge pairs from current graph
    # we can create a copy: nx.create_empty_copy(G, with_data=True)
    new_graph_2 = nx.create_empty_copy(G)
    # add in "new" edge pairs.
    for path_of_nodes in all_shortest_paths:
        if len(path_of_nodes) > 1:  # only real paths; the list also holds single red nodes
            for i in range(0, len(path_of_nodes) - 1):  # connect i to i + 1
                new_graph_2.add_edge(path_of_nodes[i], path_of_nodes[i + 1])

    # Get center of reduced graph
    temp_graph = new_graph_2.copy()
    temp_nodes = list(temp_graph.nodes)

    # If node is not connected to any other node, remove it from graph
    # Need to do this to be able to use find_center_node
    for node in temp_nodes:
        if temp_graph.degree[node] == 0:
            temp_graph.remove_node(node)

    reduced_mst_ctr = find_center_node(temp_graph)[0]
    new_graph_2.nodes[reduced_mst_ctr]['wrk'] = 'r-ctr'

    # #nodes_in_new_graph.sort()
    # # ### NOTE, this changes the type to a list
    # # nodes_in_new_graph = sorted(nodes_in_new_graph)
    # print(nodes_in_new_graph)
    # for i in range (0, N):
    #     if i not in nodes_in_new_graph:
    #         new_graph.add_node(i, wrk=G.nodes[i]['wrk'])

    # # After adding the nodes, we must add the edges.
    # for path_of_nodes in all_shortest_paths:
    #     if len(path_of_nodes) > 1: #ie only save pathed nodes, cause we have list of just single red.
    #         for i in range (0, len(path_of_nodes)-1): #Notice we stop one before because we are connecting i to i+1
    #             new_graph.add_edge(path_of_nodes[i], path_of_nodes[i+1])

    # # We are still missing the white nodes not in the path, and thus need to figure out which those are and add them.
    # G=new_graph

    G = new_graph_2

    weighted_edge_M_pairs = list()
    for shortest_path_i in all_shortest_paths:
        if len(shortest_path_i) > 1:
            i = 1
            red_found = False
            while not red_found:
                ## Insert logic for any center nodes added.
                if G.nodes[shortest_path_i[i]]['wrk'] == 's' or G.nodes[
                        shortest_path_i[i]]['wrk'] == 'd-ctr' or G.nodes[
                            shortest_path_i[i]]['wrk'] == 'r-ctr':
                    red_found = True
                    # print(i)
                    # print(shortest_path_i[i])
                else:
                    i += 1
            weighted_edge_M_pairs.append(
                (shortest_path_i[0], shortest_path_i[i], i))

    print(weighted_edge_M_pairs)

    # New graph with M connected nodes only, as well as weights added in.
    m_node_graph = nx.create_empty_copy(G)
    for weighted_edge_M_pair in weighted_edge_M_pairs:
        m_node_graph.add_edge(weighted_edge_M_pair[0],
                              weighted_edge_M_pair[1],
                              weight=weighted_edge_M_pair[2])
    #
    m_node_graph = nx.minimum_spanning_tree(m_node_graph)

    if draw:  # draw an original graph with a network center
        plt1 = plt.figure(figsize=(15, 15))
        colors = set_node_colors(G)
        nx.draw_networkx_nodes(G,
                               pos,
                               node_size=160,
                               node_color=colors,
                               edgecolors='gray',
                               cmap=plt.cm.Reds_r)
        nx.draw_networkx_edges(G, pos, alpha=0.2)
        labels = {}
        for n in range(G.order()):
            labels[n] = str(n)
        nx.draw_networkx_labels(G, pos, labels, font_size=10)

        plt2 = plt.figure(figsize=(15, 15))
        colors = set_node_colors(m_node_graph)
        nx.draw_networkx_nodes(m_node_graph,
                               pos,
                               node_size=160,
                               node_color=colors,
                               edgecolors='gray',
                               cmap=plt.cm.Reds_r)
        labels = nx.get_edge_attributes(m_node_graph, 'weight')
        # formatted_labels = {}
        # for label in labels:
        #     formatted_labels[label]=  "weight: "+str(label[1])
        nx.draw_networkx_edge_labels(m_node_graph, pos, edge_labels=labels)
        nx.draw_networkx_edges(m_node_graph, pos)
        labels = {}
        for n in range(m_node_graph.order()):
            labels[n] = str(n)
        nx.draw_networkx_labels(m_node_graph, pos, labels, font_size=10)

    if draw:
        plt.xlim(-0.05, 1.05)
        plt.ylim(-0.05, 1.05)
        # plt.axis('off')
        plt.show(block=False)
    return G
Code example #13
def generate_topology(n_servers, n_switches, n_ports, debug=False):

    if debug:
        random.seed(RNG_SEED)

    sys.stdout.write(
        "Generating Jellyfish topology: %d servers, %d switches, %d ports per switch..."
        % (n_servers, n_switches, n_ports))

    topo = {}

    G = nx.Graph()
    topo["graph"] = G
    topo["n_ports"] = n_ports

    topo["n_hosts"] = n_servers
    for s in range(n_servers):
        G.add_node('h' + str(s), ip='10.0.' + str(s) + '.1')

    topo["n_switches"] = n_switches
    outport_mappings = {}
    open_ports = [n_ports] * n_switches

    for sw in range(n_switches):
        curr_switch = 's' + str(sw)
        G.add_node(curr_switch)

        i = sw
        while i < n_servers:
            G.add_edge('h' + str(i), curr_switch)
            outport_mappings[(curr_switch, 'h' + str(i))] = open_ports[sw]
            outport_mappings[('h' + str(i), curr_switch)] = 1
            i += n_switches
            open_ports[sw] -= 1

    start_open_ports = copy.deepcopy(open_ports)

    topo['outport_mappings'] = outport_mappings
    # randomly link the remaining open ports
    links = defaultdict(list)
    while sum(open_ports) > 1:
        open_switches = [x for x in range(n_switches) if open_ports[x] > 0]
        if len(open_switches) == 1:  # special case: two ports remaining on same switch
            curr = open_switches[0]
            if open_ports[curr] >= 2:
                other_switches = [s for s in range(n_switches) if s != curr]
                x = 's' + str(random.choice(other_switches))
                y = random.choice(list(nx.all_neighbors(G, x)))
                G.remove_edge(x, y)
                x_port = outport_mappings.pop((x, y))
                y_port = outport_mappings.pop((y, x))
                G.add_edge(x, 's' + str(curr))
                G.add_edge(y, 's' + str(curr))
                outport_mappings[(x, 's' + str(curr))] = x_port
                outport_mappings[('s' + str(curr), x)] = open_ports[curr]
                outport_mappings[(y, 's' + str(curr))] = y_port
                outport_mappings[('s' + str(curr), y)] = open_ports[curr] - 1
                open_ports[curr] -= 2
                continue

        start_over = False
        while True:
            x = random.choice(open_switches)
            x_name = 's' + str(x)
            unconnected_switches = [
                s for s in open_switches
                if ('s' + str(s) not in list(G.neighbors(x_name)) and s != x)
            ]
            if len(unconnected_switches) == 0:
                no_new_links = True
                for os in open_switches:
                    for os2 in open_switches:
                        if os != os2:
                            no_new_links = no_new_links and 's' + str(
                                os2) in list(G.neighbors('s' + str(os)))
                if no_new_links:
                    start_over = True
                    break
            else:
                break

        if start_over:
            open_ports = copy.deepcopy(start_open_ports)
            # drop every edge (host links included) and retry the random wiring
            G = nx.create_empty_copy(G)
            continue

        y = random.choice(unconnected_switches)
        open_ports[x] -= 1
        open_ports[y] -= 1
        G.add_edge('s' + str(x), 's' + str(y))
        outport_mappings[('s' + str(x), 's' + str(y))] = open_ports[x] + 1
        outport_mappings[('s' + str(y), 's' + str(x))] = open_ports[y] + 1

    sys.stdout.write(" done\n")
    return topo
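Hypothetical invocation (the snippet assumes module-level `import sys, copy, random`, `from collections import defaultdict`, `import networkx as nx`, and an `RNG_SEED` constant):

topo = generate_topology(n_servers=16, n_switches=20, n_ports=4)
print(topo["graph"].number_of_nodes())   # 16 hosts + 20 switches = 36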
Code example #14
File: assembly.py  Project: joskid/pydna
    def __init__(self, frags=None, limit=25, algorithm=common_sub_strings):

        # Fragments is a string subclass with some extra properties
        # The order of the fragments has significance
        fragments = []
        for f in frags:
            fragments.append(
                {
                    "upper": str(f.seq).upper(),
                    "mixed": str(f.seq),
                    "name": f.name,
                    "features": f.features,
                    "nodes": [],
                }
            )

        # rcfragments is a dict with fragments as keys and the reverse
        # complement as value
        rcfragments = dict(
            (
                f["mixed"],
                {
                    "upper": str(frc.seq).upper(),
                    "mixed": str(frc.seq),
                    "name": frc.name,
                    "features": frc.features,
                    "nodes": [],
                },
            )
            for f, frc in zip(fragments, (f.rc() for f in frags))
        )
        # The nodemap dict holds nodes and their reverse complements
        nodemap = {
            "begin": "end",
            "end": "begin",
            "begin_rc": "end_rc",
            "end_rc": "begin_rc",
        }

        # all combinations of fragments are compared.
        # see https://docs.python.org/3.6/library/itertools.html
        # itertools.combinations('ABCD', 2)-->  AB AC AD BC BD CD
        for first, secnd in _itertools.combinations(fragments, 2):

            if first["upper"] == secnd["upper"]:
                continue

            firrc = rcfragments[first["mixed"]]
            secrc = rcfragments[secnd["mixed"]]

            # matches is a list of tuples of three integers describing
            # overlapping sequences:
            # (start position in first, start position in secnd, length)
            # This comparison is done using uppercase strings, see the
            # _Fragment class
            matches = algorithm(first["upper"], secnd["upper"], limit)

            for start_in_first, start_in_secnd, length in matches:
                # node is a string and represent the shared sequence in upper
                # case.
                node = first["upper"][start_in_first : start_in_first + length]

                first["nodes"].append((start_in_first, length, node))
                secnd["nodes"].append((start_in_secnd, length, node))

                # The same node exists between the reverse complements of
                # first and secnd
                # The new positions are calculated from the length of the
                # fragment and
                # the overlapping sequence
                start_in_firrc = len(first["upper"]) - start_in_first - length
                start_in_secrc = len(secnd["upper"]) - start_in_secnd - length
                # noderc is the reverse complement of node
                noderc = firrc["upper"][start_in_firrc : start_in_firrc + length]
                firrc["nodes"].append((start_in_firrc, length, noderc))
                secrc["nodes"].append((start_in_secrc, length, noderc))
                nodemap[node] = noderc

            # first is also compared to the rc of secnd
            matches = algorithm(first["upper"], secrc["upper"], limit)

            for start_in_first, start_in_secrc, length in matches:
                node = first["upper"][start_in_first : start_in_first + length]
                first["nodes"].append((start_in_first, length, node))
                secrc["nodes"].append((start_in_secrc, length, node))

                start_in_firrc, start_in_secnd = (
                    len(first["upper"]) - start_in_first - length,
                    len(secnd["upper"]) - start_in_secrc - length,
                )
                noderc = firrc["upper"][start_in_firrc : start_in_firrc + length]
                firrc["nodes"].append((start_in_firrc, length, noderc))
                secnd["nodes"].append((start_in_secnd, length, noderc))
                nodemap[node] = noderc

        # A directed graph class that can store multiedges.
        # Multiedges are multiple edges between two nodes. Each edge can hold
        # optional data or attributes.
        # https://networkx.github.io/documentation/stable/reference/classes/
        # multidigraph.html

        order = 0
        G = _nx.MultiDiGraph()
        # loop through all fragments and their reverse complements

        for f in fragments:
            f["nodes"] = sorted(set(f["nodes"]))

        for f in rcfragments.values():
            f["nodes"] = sorted(set(f["nodes"]))

        for f in _itertools.chain(fragments, rcfragments.values()):

            # nodes are sorted in place in the order of their position
            # duplicates are removed (same position and sequence)
            # along the fragment since nodes are a tuple (position(int),
            # sequence(str))

            before = G.order()
            G.add_nodes_from(
                (node, {"order": order + od, "length": length})
                for od, (start, length, node) in enumerate(
                    n for n in f["nodes"] if n[2] not in G
                )
            )
            order += G.order() - before

            for (start1, length1, node1), (
                start2,
                length2,
                node2,
            ) in _itertools.combinations(f["nodes"], 2):

                feats = [
                    ft
                    for ft in f["features"]
                    if start1 <= ft.location.start
                    and start2 + G.nodes[node2]["length"] >= ft.location.end
                ]

                for feat in feats:
                    feat.location += -start1

                G.add_edge(
                    node1,
                    node2,  # nodes (strings)
                    piece=slice(start1, start2),  # slice
                    features=feats,  # features
                    seq=f["mixed"],  # mixed case string
                    name=f["name"],
                )  # string

        self.G = _nx.create_empty_copy(G)
        self.G.add_edges_from(
            sorted(
                G.edges(data=True), key=lambda t: len(t[2].get("seq", 1)), reverse=True
            )
        )
        self.nodemap = {**nodemap, **{nodemap[i]: i for i in nodemap}}
        self.limit = limit
        self.fragments = fragments
        self.rcfragments = rcfragments
        self.algorithm = algorithm
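The `create_empty_copy` + `add_edges_from(sorted(...))` idiom above rebuilds the multigraph with its edges re-inserted longest-sequence-first; a minimal standalone sketch of the same trick:

import networkx as nx

G = nx.MultiDiGraph()
G.add_edge('a', 'b', seq='AA')
G.add_edge('a', 'b', seq='AAAA')
H = nx.create_empty_copy(G)   # same nodes and data, no edges
H.add_edges_from(sorted(G.edges(data=True),
                        key=lambda t: len(t[2].get('seq', '')), reverse=True))
print([d['seq'] for _, _, d in H.edges(data=True)])   # ['AAAA', 'AA']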
Code example #15
def make_graph(names, organization, gx=None):
    '''
    Creates/Updates a dependency graph based on names of packages.
    The dependency graph is used to decide which packages
    need to be upgraded before others.

    Parameters
    ----------
    names: list
        List of package names for placement into the graph.
    organization: str
        Name of GitHub organization containing feedstock repos.
    gx: nx.DiGraph, optional
        Dependency graph to be updated.

    Returns
    -------
    gx: nx.DiGraph
        New/Updated dependency graph displaying the relationships
        between packages listed in names.
    '''
    from conda_forge_tick.utils import LazyJson
    logger.info("reading graph")
    if gx is None:
        print('Creating graph from scratch...')
        gx = nx.DiGraph()
    else:
        print('Updating graph with new packages...')
    new_names = [name for name in names if name not in gx.nodes]
    old_names = [name for name in names if name in gx.nodes]
    assert gx is not None
    old_names = sorted(old_names, key=lambda n: gx.nodes[n].get("time", 0))
    total_names = new_names + old_names
    logger.info("start feedstock fetch loop")
    print('Fetching feedstock attributes...')

    builder = _build_graph_sequential if DEBUG else _build_graph_process_pool
    builder(gx, total_names, new_names, organization)
    logger.info("feedstock fetch loop completed")
    print('Finished fetching feedstock attributes')

    gx2 = deepcopy(gx)
    logger.info("inferring nodes and edges")
    print('Creating nodes and edges...')
    # make the outputs look up table so we can link properly
    outputs_lut = {
        k: node_name
        for node_name, node in gx.nodes.items()
        for k in node.get("payload", {}).get("outputs_names", [])
    }
    # add this as an attr so we can use later
    gx.graph["outputs_lut"] = outputs_lut
    strong_exports = {
        node_name
        for node_name, node in gx.nodes.items()
        if node.get("payload").get("strong_exports", False)
    }
    # This drops all the edge data and only keeps the node data
    gx = nx.create_empty_copy(gx)
    # TODO: label these edges with the kind of dep they are and their platform
    for node, node_attrs in gx2.nodes.items():
        with node_attrs["payload"] as attrs:
            # replace output package names with feedstock names via LUT
            deps = set(
                map(
                    lambda x: outputs_lut.get(x, x),
                    set().union(*attrs.get("requirements", {}).values()),
                ))

            # handle strong run exports
            overlap = deps & strong_exports
            requirements = attrs.get("requirements")
            if requirements:
                requirements["host"].update(overlap)
                requirements["run"].update(overlap)

        for dep in deps:
            if dep not in gx.nodes:
                # for packages which aren't feedstocks and aren't outputs
                # usually these are stubs
                lzj = LazyJson(f"node_attrs/{dep}.json")
                lzj.update(feedstock_name=dep, bad=False, archived=True)
                gx.add_node(dep, payload=lzj)
            gx.add_edge(dep, node)
    logger.info("new nodes and edges infered")
    print('Dependency graph complete')
    return gx
Code example #16
 def resetGroupEdges(self):
     assert self.group_graph is not None and self.group_adj is not None, "Group Graph not instantiated"
     self.group_graph = nx.create_empty_copy(self.group_graph, with_data=True)
     group_edges = adjMatrixEdges(self.group_adj, list(self.group_graph.nodes))
     for u, v, weight in group_edges:
         self.group_graph.add_edge(u, v, weight=weight)
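The reset idiom above (take an empty copy with node data intact, then re-add freshly weighted edges) in a minimal standalone form:

import networkx as nx

g = nx.Graph()
g.add_node('u', kind='group')
g.add_edge('u', 'v', weight=2.0)
g = nx.create_empty_copy(g, with_data=True)   # wipe edges, keep node data
g.add_edge('u', 'v', weight=5.0)              # re-add with fresh weights
print(g.nodes['u']['kind'], g['u']['v']['weight'])   # group 5.0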
Code example #17
 def set_empty_graphs(self):
     self.empty_design_graph = nx.create_empty_copy(self.design_graph)
     self.empty_dw_graph = nx.create_empty_copy(self.dw_graph)
Code example #18
File: SSMO_BACKUP.py  Project: lee-jingu/SSMOECHS
def Optimizer(network,
              Alive_Node,
              Update=False,
              R=30,
              In_Median=30,
              First=False):
    SSMO_NET = nx.create_empty_copy(network)
    SSMO_CHID = []
    NB_Cluster = max(round(cf.P_CH * len(Alive_Node)), 1)

    update = 0
    if Update == True:
        Rmax = 0
        for i in Alive_Node:
            x, y = SSMO_NET.node[i]['pos']
            R_tmp = math.sqrt(
                ((x - cf.AREA_W / 2)**2 + (y - cf.AREA_H / 2)**2) / NB_Cluster)
            if R_tmp > Rmax:
                Rmax = R_tmp
            if Rmax != R:
                R = Rmax
                update = 1

    if update == 1:
        for i in Alive_Node:
            SSMO_NET.node[i]['Cover'] = []
            SSMO_NET.node[i]['Dist'] = []
            for j in Alive_Node:
                if i == j:
                    continue
                x1, y1 = SSMO_NET.node[i]['pos']
                x2, y2 = SSMO_NET.node[j]['pos']
                D = math.sqrt((x1 - x2)**2 + (y1 - y2)**2)
                if D < R:
                    SSMO_NET.node[i]['Cover'].append(j)
                    SSMO_NET.node[i]['Dist'].append(D)

    ## Initializing Phase
    SM_Arr = []
    MG = 5
    MIR = 100
    Swarm_Size = 40
    FIT = np.zeros(Swarm_Size)
    FIT1 = np.zeros(Swarm_Size)
    FIT2 = np.zeros(Swarm_Size)
    MGLL = 20
    MLLL = 10
    Group0 = []
    Group1 = []
    Group2 = []
    Group3 = []

    for i in range(0, Swarm_Size):
        choice = np.random.choice(Alive_Node, NB_Cluster, replace=False)
        SM_Arr.append(choice)
        Group0.append(i)

    # NET_MAX = Get_MAX(SSMO_NET,SM_Arr,R,In_Median)
    NET_MAX = 0
    for i in range(0, Swarm_Size):
        f1, f2 = Get_Fitness(SSMO_NET, SM_Arr[i], R, In_Median, NET_MAX,
                             Alive_Node)
        FIT1[i] = f1
        FIT2[i] = f2
    FIT1MAX = np.max(FIT1)
    if FIT1MAX > 0:
        FIT = FIT1 / FIT1MAX + FIT2
    else:
        FIT = FIT1 + FIT2

    Group = 1
    GLID = np.where(FIT == np.max(FIT))[0][0]
    LLID_arr = np.zeros(MG, dtype=np.int32)
    LLL = np.zeros(MG, dtype=np.int32)
    Pr = 0.1
    GLL = 0
    for Iter in range(0, MIR):
        ## Local Leader Phase
        Pr += (0.4 - 0.1) / MIR
        for i in range(0, Group):
            if i == 0:
                temp = Group0
            if i == 1:
                temp = Group1
            if i == 2:
                temp = Group2
            if i == 3:
                temp = Group3

            MAXFIT = 0
            LLID = LLID_arr[i]
            LLMAX = FIT[LLID]
            LMAX = FIT[LLID]
            MAXFIT = FIT[LLID]

            Prob_Arr = np.zeros(len(Alive_Node))
            for j in temp:
                if j in LLID_arr:
                    continue
                if j == GLID:
                    continue

                if random() < Pr:
                    LL = SM_Arr[LLID]
                    SM = SM_Arr[j]
                    Rand = np.random.choice(temp, 1)[0]
                    SMR = SM_Arr[Rand]
                    ARANGE = np.hstack([SM, LL, SMR])
                    b = uniform(0, 1)
                    d = uniform(-1, 1)
                    PROBSM = np.ones(len(SM)) * (1 - b - d)
                    PROBLL = np.ones(len(LL)) * (b)
                    PROBSMR = np.ones(len(SMR)) * (d)
                    Prob_Arr = np.hstack([PROBSM, PROBLL, PROBSMR])
                    Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                    choice = list(
                        set(
                            np.random.choice(ARANGE,
                                             NB_Cluster,
                                             replace=False,
                                             p=Prob_Arr / np.sum(Prob_Arr))))
                    SM_Arr[j] = choice
                    FIT1[j], FIT2[j] = Get_Fitness(SSMO_NET, choice, R,
                                                   In_Median, FIT[LLID],
                                                   Alive_Node)
                    FIT[j] = FIT1[j] / FIT1MAX + FIT2[j]
                    if LMAX < FIT[j]:
                        LMAX = FIT[j]
                        LLID_arr[i] = j
            if LLMAX == LMAX:
                LLL[i] += 1
            if LLL[i] == MLLL:
                LLL[i] = 0
                for j in temp:
                    if j in LLID_arr:
                        continue
                    if j == GLID:
                        continue
                    if random() < Pr:
                        LL = SM_Arr[LLID]
                        GL = SM_Arr[GLID]
                        SM = SM_Arr[j]
                        ARANGE = np.hstack([SM, LL, GL])
                        b = uniform(0, 1)
                        PROBSM = np.ones(len(SM)) * (1 - 2 * b)
                        PROBLL = np.ones(len(LL)) * (b)
                        PROBGL = np.ones(len(GL)) * (b)
                        Prob_Arr = np.hstack([PROBSM, PROBLL, PROBGL])
                        Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                        choice = list(
                            set(
                                np.random.choice(ARANGE,
                                                 NB_Cluster,
                                                 replace=False,
                                                 p=Prob_Arr /
                                                 np.sum(Prob_Arr))))
                    else:
                        choice = np.random.choice(Alive_Node,
                                                  NB_Cluster,
                                                  replace=False)
                    SM_Arr[j] = choice
                    FIT1[j], FIT2[j] = Get_Fitness(SSMO_NET, choice, R,
                                                   In_Median, FIT[LLID],
                                                   Alive_Node)
                    FIT[j] = FIT1[j] / FIT1MAX + FIT2[j]
                    if LMAX < FIT[j]:
                        LMAX = FIT[j]
                        LLID_arr[i] = j

        ## Global Leader Phase
        if GLID >= Swarm_Size:
            print(GLID)
        for i in range(0, Swarm_Size - 1):
            GGLMAX = FIT[GLID]
            GLMAX = FIT[GLID]
            if i == GLID:
                continue
            if i in LLID_arr:
                continue

            Prob = 0.9 * (FIT[i] / FIT[GLID]) + 0.1
            if Prob > random():
                GL = SM_Arr[GLID]
                SM = SM_Arr[i]
                Rand = np.random.choice(Group0, 1)[0]
                SMR = SM_Arr[Rand]
                ARANGE = np.hstack([SM, GL, SMR])
                b = uniform(0, 1)
                d = uniform(-1, 1)
                PROBSM = np.ones(len(SM)) * (1 - b - d)
                PROBGL = np.ones(len(GL)) * (b)
                PROBSMR = np.ones(len(SMR)) * (d)
                Prob_Arr = np.hstack([PROBSM, PROBGL, PROBSMR])
                Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                choice = list(
                    set(
                        np.random.choice(ARANGE,
                                         NB_Cluster,
                                         replace=False,
                                         p=Prob_Arr / np.sum(Prob_Arr))))
                FIT1[i], FIT2[i] = Get_Fitness(SSMO_NET, choice, R, In_Median,
                                               FIT[LLID], Alive_Node)
                FIT[i] = FIT1[i] / FIT1MAX + FIT2[i]
                if FIT[i] > GLMAX:
                    GLMAX = FIT[i]
                    GLID = i
        if GLMAX == GGLMAX:
            GLL += 1

        ## Local Decision Phase

        ## Global Decision Phase
        if GLL == MGLL:
            GLL = 0
            Group += 1
            Choice_Node = np.arange(0, Swarm_Size, 1)
            if Group == 2:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.array(Choice_Node)
            if Group == 3:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.array(Choice_Node)
            if Group == 4:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group2))
                Group3 = np.array(Choice_Node)
            if Group == 5:
                SSMO_CHID = SM_Arr[GLID]

    SSMO_CHID = SM_Arr[GLID]
    for i in Alive_Node:
        if i in SSMO_CHID:
            SSMO_NET.node[i]['Next'] = 0
            continue
        x1, y1 = SSMO_NET.node[i]['pos']
        NNID = 0
        NN_Dist = 1000
        for NN in SSMO_CHID:
            x2, y2 = SSMO_NET.node[NN]['pos']
            new_dist = math.sqrt((x1 - x2)**2 + (y1 - y2)**2)
            if new_dist < NN_Dist:
                NNID = NN
                NN_Dist = new_dist
        SSMO_NET.node[i]['Next'] = NNID

    # for i in OUTER:
    #     NNID = 0
    #     NN = SSMO_NET.node[i]['RTBS']
    #     x,y = SSMO_NET.node[i]['pos']
    #     for j in INNER:
    #         x2,y2 = SSMO_NET.node[j]['pos']
    #         Dist = math.sqrt((x-x2)**2+(y-y2)**2)
    #         if Dist < NN:
    #             NNID = j
    #             NN = Dist
    #     SSMO_NET.node[i]['Next'] = NNID

    if First == True:
        ## add_Edge
        for i in Alive_Node:
            if i in SSMO_CHID:
                continue
            SSMO_NET.add_edge(i, SSMO_NET.node[i]['Next'])

    return SSMO_NET, SSMO_CHID, R, In_Median
Code example #19
File: KSMO.py  Project: lee-jingu/SSMOECHS
def Optimizer(network,
              Alive_Node,
              Residual=False,
              R=30,
              In_Median=30,
              First=False):
    KSMO_NET = nx.create_empty_copy(network)
    KSMO_CHID = []

    ## find kriging map z
    z = Kriging(KSMO_NET, KSMO_CHID, R, In_Median, Plot=False)

    ## Initializing Phase
    CH = []
    FIT = []
    MG = 4
    MIR = 10
    Swarm_Size = 40
    NB_Cluster = round(cf.P_CH * len(Alive_Node))
    if NB_Cluster == 0:
        NB_Cluster = 1
    MGLL = 3
    MLLL = 20
    Group0 = []
    Group1 = []
    Group2 = []
    Group3 = []
    for i in range(0, Swarm_Size):
        pos = np.random.uniform(low=0, high=cf.AREA_H, size=(NB_Cluster, 2))
        CH.append(pos)
        FIT.append(Get_Fitness(KSMO_NET, CH[i], Alive_Node, z))
    Group = 1
    GLID = np.where(FIT == np.min(FIT))[0][0]
    LLID = np.where(FIT == np.min(FIT))[0][0]
    Pr = 0.1
    Group0 = np.arange(0, len(CH), 1)

    for Iter in range(0, MIR):
        GLL = 0
        LLL = 0

        ## Local Leader Phase
        Pr += (0.4 - 0.1) / MIR
        for i in range(0, Group):
            LLMAX = FIT[LLID]
            LMAX = FIT[LLID]
            if i == 0:
                temp = Group0
            if i == 1:
                temp = Group1
            if i == 2:
                temp = Group2
            if i == 3:
                temp = Group3
            if i == LLID:
                continue
            for T in temp:
                if Pr > random():
                    SM = CH[T]
                    LL = CH[LLID]
                    SMR = CH[randint(0, len(CH) - 1)]
                    b = uniform(0, 1)
                    d = 0
                    for j in range(0, len(SM)):
                        x1, y1 = SM[j]
                        x2, y2 = LL[j]
                        x3, y3 = SMR[j]
                        X_pot = x1 + b * (x2 - x1) + d * (x3 - x1)
                        Y_pot = y1 + b * (y2 - y1) + d * (y3 - y1)
                        SM[j] = [X_pot, Y_pot]
                    CH[T] = SM
                    FIT[T] = Get_Fitness(KSMO_NET, CH[T], Alive_Node, z)
                    if FIT[T] < LLMAX:
                        LLMAX = FIT[T]
                        LLID = T

                    if LLMAX == LMAX:
                        LLL += 1

        GLID = np.where(FIT == np.min(FIT))[0][0]
        ## Global Leader Phase
        for i in range(0, len(CH)):
            GGLMAX = FIT[GLID]
            GLMAX = FIT[GLID]
            if i == GLID:
                continue

            Prob = 0.9 * (FIT[i] / FIT[GLID]) + 0.1
            if Prob > random():
                GL = CH[GLID]
                SM = CH[i]
                SMR = CH[randint(0, len(CH) - 1)]
                b = uniform(0, 1)
                d = 0
                for j in range(0, len(SM)):
                    x1, y1 = SM[j]
                    x2, y2 = GL[j]
                    x3, y3 = SMR[j]
                    X_pot = x1 + b * (x2 - x1) + d * (x3 - x1)
                    Y_pot = y1 + b * (y2 - y1) + d * (y3 - y1)
                    SM[j] = [X_pot, Y_pot]
                CH[i] = SM
                FIT[i] = Get_Fitness(KSMO_NET, CH[i], Alive_Node, z)
                if FIT[i] < GLMAX:
                    GLMAX = FIT[i]
                    GLID = i
        if GLMAX == GGLMAX:
            GLL += 1

        ## Local Decision Phase
        # if LLL == MLLL:

        ## Global Decision Phase

    for i in range(0, len(CH[GLID])):
        x, y = CH[GLID][i]
        CHID = 0
        NNDist = 1000
        for j in Alive_Node:
            x2, y2 = KSMO_NET.node[j]['pos']
            NewDist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if NNDist > NewDist:
                NNDist = NewDist
                CHID = j
        if CHID in KSMO_CHID:
            continue
        KSMO_CHID.append(CHID)

    if GLL == MGLL:
        Group += 1
        Choice_Node = copy.deepcopy(Alive_Node)
        if Group == 2:
            Group0 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            Group1 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            MGLL = 0
        if Group == 3:
            Group0 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            Group1 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            Group2 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            MGLL = 0
        if Group == 4:
            Group0 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            Group1 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            Group2 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            Group3 = np.random.choice(Choice_Node,
                                      int(len(Alive_Node) / Group),
                                      replace=False)
            MGLL = 0
        if Group == 5:
            KSMO_CHID = CH[GLID]
    ## Clustering
    for i in Alive_Node:
        if i in KSMO_CHID:
            KSMO_NET.node[i]['Next'] = 0
        else:
            NNDist = 1000
            CH_ARR = np.zeros(len(KSMO_CHID))
            x, y = KSMO_NET.node[i]['pos']
            COUNT_ARR = np.ones(len(KSMO_CHID))
            for j in range(0, len(KSMO_CHID)):
                x2, y2 = KSMO_NET.node[KSMO_CHID[j]]['pos']
                RES = KSMO_NET.node[KSMO_CHID[j]]['res_energy']
                dist = math.sqrt((x - x2)**2 + (y - y2)**2)
                if dist == 0:
                    continue
                dis2 = math.sqrt((x2 - 50)**2 + (y2 - 50)**2)
                CH_ARR[j] = RES / (dist * dis2 * COUNT_ARR[j])
            idx = np.where(CH_ARR == np.max(CH_ARR))[0][0]
            COUNT_ARR[idx] += 1
            KSMO_NET.node[i]['Next'] = KSMO_CHID[idx]

    if First == True:
        ## add_Edge
        for i in Alive_Node:
            KSMO_NET.add_edge(i, KSMO_NET.node[i]['Next'])

    return KSMO_NET, KSMO_CHID, R
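
All of the SSMOECHS-family optimizers on this page end the same way: start from nx.create_empty_copy(network), select cluster heads, point every remaining node at its nearest head via a 'Next' attribute, and materialize the edges once on the first round. A condensed sketch of just that final step (a sketch only, using the networkx 2.x G.nodes accessor rather than the legacy G.node seen in the snippets; the 'pos' and 'Next' attribute names follow the project):

import math
import networkx as nx

def attach_to_nearest_ch(net, alive_nodes, ch_ids, first=False):
    """Point each non-CH node at its nearest cluster head; CHs report to the sink (0)."""
    for i in alive_nodes:
        if i in ch_ids:
            net.nodes[i]['Next'] = 0
            continue
        x1, y1 = net.nodes[i]['pos']
        # nearest cluster head by Euclidean distance
        net.nodes[i]['Next'] = min(
            ch_ids,
            key=lambda ch: math.hypot(x1 - net.nodes[ch]['pos'][0],
                                      y1 - net.nodes[ch]['pos'][1]))
    if first:
        # materialize the routing tree only on the first round
        for i in alive_nodes:
            if i not in ch_ids:
                net.add_edge(i, net.nodes[i]['Next'])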
Code Example #20
0
def randomize_graph(graph, randomization_type, allow_self_edges=True):
    """
    Creates a random network from given network as a networkx graph
    randomization_type: 
        - "random": add same number of edges randomly between nodes of original graph
        - "preserve_topology": keep edges, shuffle nodes of original graph
        - "preserve_topology_and_node_degree": keep edges, shuffle nodes of original graph with the nodes of same degree
        - "preserve_degree_distribution": remove an edge between two random nodes with degrees k, l then add to two nodes with degrees k-1 & l-1, then shuffle nodes
        - "preserve_degree_distribution_and_node_degree": remove 2 random edges between a-b and c-d where degree(a)=degree(c) and degree(b)=degree(d) then add 2 edges between a-d and b-c, then shuffle nodes with the same degree
	- "erdos_renyi": creates a graph where edges are redistributed based on erdos renyi random model. 
	- "barabasi_albert": creates a graph where edges are redistributed based on barabasi albert model (preferential attachment). 
    """

    debug = False

    n_node = graph.number_of_nodes()
    n_edge = graph.number_of_edges()

    if randomization_type == "erdos_renyi":
        #raise Exception("Work in progress")
        p = float(2 * n_edge) / (n_node * (n_node - 1))
        # Chooses each of the possible [n(n-1)]/2 edges with probability p
        new_graph = networkx.erdos_renyi_graph(n_node, p)
        mapping = dict(zip(new_graph.nodes(), graph.nodes()))
        new_graph = networkx.relabel_nodes(new_graph, mapping)
        available_edges = graph.edges()

        # Map graph from random model to new graph
        for edge in new_graph.edges():
            if len(available_edges) > 0:
                edge_org = available_edges.pop()
                if debug:
                    print "From random:", (edge[0], edge[1])
                new_graph.add_edge(
                    edge[0], edge[1],
                    graph.get_edge_data(edge_org[0], edge_org[1]))
            # If the random model added too many edges
            else:
                if debug:
                    print "Removing:", edge
                new_graph.remove_edge(edge[0], edge[1])

        # If the random model failed to add enough edges
        nodes = new_graph.nodes()
        for edge_org in available_edges:
            source_id = random.choice(nodes)
            target_id = random.choice(nodes)
            while new_graph.has_edge(
                    source_id, target_id) or (not allow_self_edges
                                              and source_id == target_id):
                source_id = random.choice(nodes)
                target_id = random.choice(nodes)
            if debug:
                print "Adding:", (source_id, target_id)
            new_graph.add_edge(source_id, target_id,
                               graph.get_edge_data(edge_org[0], edge_org[1]))
        return new_graph

    if randomization_type == "barabasi_albert":
        #raise Exception("Work in progress")
        if n_edge >= n_node:
            # A graph of n nodes is grown by attaching new nodes each with m edges that are preferentially attached to existing nodes with high degree
            new_graph = networkx.barabasi_albert_graph(n_node, n_edge / n_node)
            mapping = dict(zip(new_graph.nodes(), graph.nodes()))
            new_graph = networkx.relabel_nodes(new_graph, mapping)
        else:
            new_graph = networkx.create_empty_copy(graph)

        available_edges = graph.edges()
        degree_map = new_graph.degree()
        nodes = new_graph.nodes()

        # Map graph from random model to new graph
        for edge in new_graph.edges():
            if len(available_edges) > 0:
                edge_org = available_edges.pop()
                if debug:
                    print "From random:", (edge[0], edge[1])
                new_graph.add_edge(
                    edge[0], edge[1],
                    graph.get_edge_data(edge_org[0], edge_org[1]))
            # If the random model added too many edges
            else:
                nodes_to_select = [
                    id for id, d in degree_map.items() for j in xrange(d + 1)
                ]
                source_id = random.choice(nodes)
                target_id = random.choice(nodes_to_select)
                if debug:
                    print "Removing:", (source_id, target_id)
                new_graph.remove_edge(source_id, target_id)
                degree_map[source_id] -= 1
                degree_map[target_id] -= 1

        # If the random model failed to add enough edges
        for edge_org in available_edges:
            nodes_to_select = [
                id for id, d in degree_map.items() for j in xrange(d + 1)
            ]
            source_id = random.choice(nodes)
            target_id = random.choice(nodes_to_select)
            while new_graph.has_edge(
                    source_id, target_id) or (not allow_self_edges
                                              and source_id == target_id):
                source_id = random.choice(nodes)
                target_id = random.choice(nodes_to_select)
            if debug:
                print "Adding:", (source_id, target_id)
            new_graph.add_edge(source_id, target_id,
                               graph.get_edge_data(edge_org[0], edge_org[1]))
            degree_map[source_id] += 1
            degree_map[target_id] += 1

        return new_graph

    new_graph = networkx.create_empty_copy(graph)
    #new_graph.add_nodes_from(graph.nodes())

    if randomization_type == "random":
        nodes = new_graph.nodes()
        for edge in graph.edges():
            source_id = random.choice(nodes)
            target_id = random.choice(nodes)
            while new_graph.has_edge(
                    source_id, target_id) or (not allow_self_edges
                                              and source_id == target_id):
                source_id = random.choice(nodes)
                target_id = random.choice(nodes)
            new_graph.add_edge(source_id, target_id,
                               graph.get_edge_data(edge[0], edge[1]))

    elif randomization_type == "preserve_topology":  # shuffle_nodes
        nodes = graph.nodes()
        random_nodes = graph.nodes()
        random.shuffle(random_nodes)
        equivalences = dict([(nodes[i], random_nodes[i])
                             for i in xrange(len(nodes))])
        new_graph.add_edges_from([
            (equivalences[current_edge[0]], equivalences[current_edge[1]],
             graph.get_edge_data(current_edge[0], current_edge[1]))
            for current_edge in graph.edges()
        ])

    elif randomization_type == "preserve_topology_and_node_degree":  # shuffle_nodes_within_same_degree
        nodes_by_degree = dict(
            (degree, []) for degree in graph.degree().values())
        graph_degree = graph.degree()
        [
            nodes_by_degree[graph_degree[node]].append(node)
            for node in graph_degree
        ]
        equivalences = {}
        for current_degree in nodes_by_degree.keys():
            nodes = nodes_by_degree[current_degree]
            random_nodes = list(nodes)
            random.shuffle(random_nodes)
            equivalences.update(
                dict([(nodes[i], random_nodes[i])
                      for i in xrange(len(nodes))]))
        new_graph.add_edges_from([
            (equivalences[current_edge[0]], equivalences[current_edge[1]],
             graph.get_edge_data(current_edge[0], current_edge[1]))
            for current_edge in graph.edges()
        ])

    elif randomization_type == "preserve_degree_distribution":
        ## add edges as well
        for current_node1, current_node2 in graph.edges():
            new_graph.add_edge(
                current_node1, current_node2,
                graph.get_edge_data(current_node1, current_node2))
        max_degree = sorted(graph.degree().values())[-1]
        #nodes_by_degree = dict( (degree,{}) for degree in graph.degree() )
        nodes_by_degree = dict(
            (degree, {}) for degree in xrange(max_degree + 1))
        graph_degree = graph.degree()
        [
            nodes_by_degree[graph_degree[node]].setdefault(node)
            for node in graph_degree
        ]
        #print new_graph.nodes(), new_graph.edges()
        #print nodes_by_degree
        #if n_edge < MIN_NUMBER_OF_PERTURBATION:
        #    n_perturbation = random.randint(n_edge/2, n_edge)
        #else:
        #    n_perturbation = random.randint(MIN_NUMBER_OF_PERTURBATION, n_edge)
        n_perturbation = random.randint(n_edge / 2, n_edge)
        for i in xrange(n_perturbation):
            n_trial = 0
            while True:
                n_trial += 1
                if n_trial > MAX_NUMBER_OF_TRIAL:
                    if debug:
                        print "Warning: Max number of trials exceeded in perturbation ", i
                    break
                source_id = random.choice(new_graph.nodes())
                source_degree = new_graph.degree(source_id)
                while source_degree < 1:
                    source_id = random.choice(new_graph.nodes())
                    source_degree = new_graph.degree(source_id)
                target_id = random.choice(new_graph.neighbors(source_id))
                target_degree = new_graph.degree(target_id)
                del nodes_by_degree[source_degree][source_id]
                nodes_by_degree[source_degree - 1].setdefault(source_id)
                del nodes_by_degree[target_degree][target_id]
                nodes_by_degree[target_degree - 1].setdefault(target_id)
                ## not very important to check for cases where new_source = source (v.v. for targets)
                new_source_id = random.choice(nodes_by_degree[source_degree -
                                                              1].keys())
                new_target_id = random.choice(nodes_by_degree[target_degree -
                                                              1].keys())
                if debug:
                    print source_id, target_id, " / ", new_source_id, new_target_id
                ## check if going to add an existing edge or self edge
                if new_graph.has_edge(
                        new_source_id,
                        new_target_id) or new_source_id == new_target_id:
                    del nodes_by_degree[source_degree - 1][source_id]
                    nodes_by_degree[source_degree].setdefault(source_id)
                    del nodes_by_degree[target_degree - 1][target_id]
                    nodes_by_degree[target_degree].setdefault(target_id)
                    continue
                if debug:
                    print "rm %d %d" % (source_id, target_id)
                edge_data = new_graph.get_edge_data(source_id, target_id)
                new_graph.remove_edge(source_id, target_id)
                if debug:
                    print "add %d %d" % (new_source_id, new_target_id)
                new_graph.add_edge(new_source_id, new_target_id, edge_data)
                del nodes_by_degree[source_degree - 1][new_source_id]
                nodes_by_degree[source_degree].setdefault(new_source_id)
                del nodes_by_degree[target_degree - 1][new_target_id]
                nodes_by_degree[target_degree].setdefault(new_target_id)
                break
        #self.randomize_graph(new_graph, "preserve_topology")

    elif randomization_type == "preserve_degree_distribution_and_node_degree":
        ## add edges as well
        for current_node1, current_node2 in graph.edges():
            new_graph.add_edge(
                current_node1, current_node2,
                graph.get_edge_data(current_node1, current_node2))
        nodes_by_degree = dict(
            (degree, {}) for degree in graph.degree().values())
        graph_degree = graph.degree()
        [
            nodes_by_degree[graph_degree[node]].setdefault(node)
            for node in graph_degree
        ]

        #if n_edge < MIN_NUMBER_OF_PERTURBATION:
        #    n_perturbation = random.randint(1, n_edge)
        #else:
        #    n_perturbation = random.randint(MIN_NUMBER_OF_PERTURBATION, n_edge)
        n_perturbation = random.randint(n_edge / 2, n_edge)
        for i in xrange(n_perturbation):
            source_id = random.choice(new_graph.nodes())
            source_degree = new_graph.degree(source_id)
            ## find a node for which another node with the same degree exists
            #available_neighbors = []
            n_trial = 0
            while True:  #(len(nodes_by_degree[source_degree]) < 2 or len(available_neighbors) < 1):
                n_trial += 1
                if n_trial > MAX_NUMBER_OF_TRIAL:
                    if debug:
                        print "Warning: Max number of trials exceeded in perturbation ", i
                    break
                source_id = random.choice(new_graph.nodes())
                source_degree = new_graph.degree(source_id)
                if len(nodes_by_degree[source_degree]) < 2:
                    continue
                available_neighbors = []
                ## find a neighbor for which another node with the same degree exists
                for neighbor_id in new_graph.neighbors_iter(source_id):
                    if source_degree == new_graph.degree(neighbor_id):
                        if len(nodes_by_degree[new_graph.degree(
                                neighbor_id)]) > 2:
                            available_neighbors.append(neighbor_id)
                    else:
                        if len(nodes_by_degree[new_graph.degree(
                                neighbor_id)]) > 1:
                            available_neighbors.append(neighbor_id)
                if len(available_neighbors) < 1:
                    continue
                target_id = random.choice(available_neighbors)
                target_degree = new_graph.degree(target_id)
                ## select a new source node with different id
                n_trial2 = 0
                inner_break = False
                while True:
                    n_trial2 += 1
                    if n_trial2 > MAX_NUMBER_OF_TRIAL:
                        if debug:
                            print "Warning: Max number of trials exceeded in perturbation ", i
                        inner_break = True
                        break
                    new_source_id = random.choice(
                        nodes_by_degree[source_degree].keys())
                    while new_source_id == source_id:
                        new_source_id = random.choice(
                            nodes_by_degree[source_degree].keys())
                    new_available_neighbors = []
                    ## find a neighbor as new target node for which id is different from target and has an id equivalent to target
                    for neighbor_id in new_graph.neighbors_iter(new_source_id):
                        if target_degree == new_graph.degree(neighbor_id):
                            new_available_neighbors.append(neighbor_id)
                    if len(new_available_neighbors) < 1:
                        continue
                    new_target_id = random.choice(new_available_neighbors)
                    if len(new_available_neighbors) > 1:
                        while new_target_id == target_id:
                            new_target_id = random.choice(
                                new_available_neighbors)
                            #print new_available_neighbors, new_target_id
                    else:
                        new_target_id = new_available_neighbors[0]
                    break
                if inner_break:
                    break
                if debug:
                    print source_id, target_id, " / ", new_source_id, new_target_id
                if source_id == new_target_id or new_source_id == target_id:
                    continue
                if new_graph.has_edge(source_id,
                                      new_target_id) or new_graph.has_edge(
                                          new_source_id, target_id):
                    continue
                if debug:
                    print "rm %d %d" % (source_id, target_id)
                    print "rm %d %d" % (new_source_id, new_target_id)
                edge_data_1 = new_graph.get_edge_data(source_id, target_id)
                edge_data_2 = new_graph.get_edge_data(new_source_id,
                                                      new_target_id)
                new_graph.remove_edge(source_id, target_id)
                new_graph.remove_edge(new_source_id, new_target_id)
                if debug:
                    print "add %d %d" % (source_id, new_target_id)
                    print "add %d %d" % (new_source_id, target_id)
                new_graph.add_edge(source_id, new_target_id, edge_data_1)
                new_graph.add_edge(new_source_id, target_id, edge_data_2)

    else:
        raise Exception("Unknown randomization type %s" % randomization_type)

    return new_graph
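
For orientation, a minimal invocation sketch of the function above (it targets Python 2 and the networkx 1.x API, and assumes the module-level constant MAX_NUMBER_OF_TRIAL is defined alongside it):

import networkx

g = networkx.erdos_renyi_graph(30, 0.1)

# same edge list, node labels permuted uniformly at random
g_shuffled = randomize_graph(g, "preserve_topology")

# same number of edges, endpoints resampled uniformly, self loops forbidden
g_random = randomize_graph(g, "random", allow_self_edges=False)

assert g.number_of_edges() == g_shuffled.number_of_edges()
assert g.number_of_edges() == g_random.number_of_edges()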
Code Example #21
0
def reduce_graph(G, M, draw=True):
    ''' G will be reduced to an M-node, data-server-only graph '''

    pos = nx.get_node_attributes(G, 'pos')
    empty_copy = nx.create_empty_copy(G)
    # print(empty_copy.nodes(data=True))
    G = empty_copy

    # Go through the nodes and save only the M nodes.
    m_nodes = {}

    # Adding an integer as a key as easier to iterate when computing hamming distance
    index = 0
    for node in G.nodes(data=True):
        if node[1]['wrk'] == 's':
            m_nodes[index] = node
            index += 1
    # print(m_nodes)

    # ctr = find_center_node(G)[0]
    # G.nodes[ctr]['wrk'] = 'd-ctr'

    for i in range(len(m_nodes)):
        # NOTE this for loop should be simplified to i > j
        for j in range(i + 1, len(m_nodes)):
            count_bit_difference = 0  #NOTE Should ALWAYS be at least 1.
            # can convert this to its own function
            for bit_position in range(8):
                #print(m_nodes[i][0][bit_position])
                #print(m_nodes[j][0][bit_position])
                if m_nodes[i][0][bit_position] != m_nodes[j][0][bit_position]:
                    count_bit_difference += 1

            G.add_edge(m_nodes[i][0],
                       m_nodes[j][0],
                       weight=count_bit_difference)

    # Removes white nodes. Need to do this so find_center_node() sees one connected component
    copy = G.copy()
    copy.remove_nodes_from(list(nx.isolates(copy)))
    red_ctr = find_center_node(copy)[0]
    G.nodes[red_ctr]['wrk'] = 'r-ctr'
    G.remove_nodes_from(list(nx.isolates(G)))  # remove white nodes

    # G.nodes[int(red_ctr, 2)]['wrk'] = 'r_ctr'

    if draw:  # draw an original graph with a network center
        plt1 = plt.figure(figsize=(15, 15))
        colors = set_node_colors(G)

        # Check if node only occurs once in list of edges. If yes, remove edge
        node_count = len(G.nodes)
        edges = G.edges()
        connection_counts = {}

        # Creates a dictionary that lists all the nodes a node is connected to (no duplicates)
        for node in range(0, node_count):
            for edge in edges:
                if node == edge[0] and edge[0] not in range(0, M):
                    if node in connection_counts:
                        connection_counts[node] += [edge[1]]
                    else:
                        connection_counts[node] = [edge[1]]

        for elem in connection_counts.keys():
            if len(connection_counts[elem]) == 1:
                G.remove_edge(elem, connection_counts[elem][0])

        # for edge in edges:
        #     if edge[0] or edge[1] not in range(0, M):
        #         if edge[0] in connection_counts:
        #             connection_counts[edge[0]] += [edge[1]]
        #         else:
        #             connection_counts[edge[0]] = [edge[1]]

        # for elem in connection_counts.keys():
        #     if len(connection_counts[elem]) == 1:
        #         G.remove_edge(elem, connection_counts[elem][0])

        # if not checkconnection(G, M):
        #     G.add_edge(elem, connection_counts[elem])

        nx.draw_networkx_nodes(G,
                               pos,
                               node_size=160,
                               node_color=colors,
                               edgecolors='gray',
                               cmap=plt.cm.Reds_r)
        nx.draw_networkx_edges(G, pos, alpha=0.2)
        labels = {}
        for node in G.nodes(data=True):
            labels[node[0]] = node[0]
        nx.draw_networkx_labels(G, pos, labels, font_size=10)

        edge_labels = nx.get_edge_attributes(G, 'weight')
        # formatted_labels = {}
        # for label in labels:
        #     formatted_labels[label]=  "weight: "+str(label[1])
        nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)

    # realize a logic to reduce the network based on find MST

    if draw:
        plt.xlim(-0.05, 1.05)
        plt.ylim(-0.05, 1.05)
        # plt.axis('off')
        plt.show(block=False)

    return G
Code Example #22
0
File: graph_dependencies.py Project: UWNETLAB/pdpp
def depgraph(files='png', gs=default_graph_style):

    """
    This is a docstring
    """

    from pdpp.utils.directory_test import get_pdpp_tasks

    all_tasks: List[BaseTask] = get_pdpp_tasks()

    SOURCE = nx.DiGraph()
    SPARSE = nx.DiGraph()

    nodes = []
    edges = []
    disabled_nodes = []

    """
    This section populates the graph with nodes and edges from dependency tasks to dependent tasks
    """

    for task in all_tasks:
        if task.enabled:
            nodes.append(task.target_dir)
        else:
            disabled_nodes.append(task.target_dir)
        for linkage in task.dep_files:
            edges.append((linkage, task.target_dir))

    """
    This section creates the SPARSE graph, consisting only of edges indicating dependencies between tasks
    """

    SPARSE.add_nodes_from(
        nodes, 
        style=gs.TASK_NODE_STYLE, 
        shape=gs.TASK_NODE_SHAPE, 
        penwidth=gs.TASK_NODE_PENWIDTH, 
        categ="task"
        )
    SPARSE.add_nodes_from(
        disabled_nodes, 
        style=gs.TASK_NODE_STYLE, 
        shape=gs.TASK_NODE_SHAPE, 
        penwidth=gs.TASK_NODE_PENWIDTH, 
        categ="disabled"
        )
    SPARSE.add_edges_from(
        edges, 
        color=gs.EDGE_COLOR, 
        penwidth=gs.EDGE_PEN_WIDTH
        )        
    node_colour(SPARSE, gs)  

    output_name = "dependencies_sparse"
    export_graph(SPARSE, output_name, files)

    """
    The SOURCE graph can be built out from the SPARSE graph; it simply adds source files and draws edges between them and their tasks
    """

    SOURCE = SPARSE.copy()

    for task in all_tasks:
        for source_file in task.src_files:                
            src_links(task.target_dir, source_file, SOURCE, gs)

            
    output_name = "dependencies_source"
    export_graph(SOURCE, output_name, files)

    """
    The FILE graph is built from scratch, using edges to represent the connections between tasks and the files that they have as either 
    targets (implicitly defined as a file they output that another task relies upon) or dependencies (defined explicitly)
    """

    FILE = nx.create_empty_copy(SPARSE)    

    for task in all_tasks:
        
        # Add edges from dependency files
        for dep_dataclass in task.dep_files.values():

            # ADD FILES
            for dep_file in dep_dataclass.file_list:
                dep_name = join(dep_dataclass.task_name, dep_dataclass.task_out, dep_file)
                FILE.add_node(
                    dep_name, 
                    style=gs.FILE_NODE_STYLE, 
                    shape=gs.FILE_FILE_SHAPE, 
                    fillcolor=gs.FILE_NODE_COLOR, 
                    categ='file', 
                    label=dep_file, 
                    penwidth=gs.FILE_NODE_PENWIDTH
                    )
                FILE.add_edge(
                    dep_name, 
                    task.target_dir, 
                    color=gs.EDGE_COLOR, 
                    penwidth=gs.EDGE_PEN_WIDTH
                    )
            
            # ADD DIRECTORIES
            for dep_dir in dep_dataclass.dir_list:
                dep_name = join(dep_dataclass.task_name, dep_dataclass.task_out, dep_dir)
                dep_label = dep_dir + "/"
                FILE.add_node(
                    dep_name, 
                    style=gs.FILE_NODE_STYLE, 
                    shape=gs.FILE_DIR_SHAPE, 
                    fillcolor=gs.FILE_NODE_COLOR, 
                    categ='file', 
                    label=dep_label, 
                    penwidth=gs.FILE_NODE_PENWIDTH
                    )
                FILE.add_edge(
                    dep_name, 
                    task.target_dir, 
                    color=gs.EDGE_COLOR, 
                    penwidth=gs.EDGE_PEN_WIDTH
                    )
        
        # Add edges to targets (as defined by others)
        target_list = find_dependencies_from_others(task, all_tasks)

        for full_target_path in target_list:
            target_name = full_target_path.split('/')[-1]
            FILE.add_edge(
                task.target_dir, 
                full_target_path, 
                color=gs.EDGE_COLOR, 
                penwidth=gs.EDGE_PEN_WIDTH)
                    

    output_name = "dependencies_file"
    export_graph(FILE, output_name, files)
    
    ALL = FILE.copy()
    for task in all_tasks:        
        for source_file in task.src_files:                
            src_links(task.target_dir, source_file, ALL, gs)

    output_name = "dependencies_all"
    export_graph(ALL, output_name, files)
Code Example #23
0
 def reset(self):
     """Resets the simulation to an empty graph with no logical error."""
     self.G = nx.create_empty_copy(self.G)
     self.logical_error = False
Code Example #24
0
    def __init__(
        self,
        graph: nx.DiGraph = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
        piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None,
    ):
        # rebuild the graph to only use edges from the arm osx requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                reqs = attrs.get(
                    f"{plat_arch}_requirements",
                    attrs.get("osx_64_requirements",
                              attrs.get("requirements", {})),
                )
                host_deps = set(as_iterable(reqs.get("host", set())))
                run_deps = set(as_iterable(reqs.get("run", set())))
                deps = host_deps.union(run_deps)

                # We are including the compiler stubs here so that
                # excluded_dependencies work correctly.
                # Edges to these compiler stubs are removed afterwards
                build_deps = set(as_iterable(reqs.get("build", set())))
                for build_dep in build_deps:
                    if build_dep.endswith("_stub"):
                        deps.add(build_dep)
                for dep in get_deps_from_outputs_lut(
                        deps, graph.graph["outputs_lut"]):
                    graph2.add_edge(dep, node)

        super().__init__(
            graph=graph2,
            pr_limit=pr_limit,
            check_solvable=False,
            piggy_back_migrations=piggy_back_migrations,
        )

        assert (not self.check_solvable
                ), "We don't want to check solvability for arm osx!"

        self.name = name

        # Excluded dependencies need to be removed before target_packages are
        # filtered so that if a target_package is excluded, its dependencies
        # are not added to the graph
        for excluded_dep in self.excluded_dependencies:
            self.graph.remove_nodes_from(
                nx.descendants(self.graph, excluded_dep))

        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
                "osx_arm64.txt", ) as f:
            self.target_packages = set(f.read().split())

        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from(
                [n for n in self.graph if n not in packages])

        # filter out stub packages and ignored packages
        for node, attrs in list(self.graph.nodes("payload")):
            if (not attrs or node.endswith("_stub") or (node.startswith("m2-"))
                    or (node.startswith("m2w64-"))
                    or (node in self.ignored_packages) or all_noarch(attrs)):
                pluck(self.graph, node)

        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
Code Example #25
0
    def __init__(
        self,
        graph: nx.DiGraph = None,
        name: Optional[str] = None,
        pr_limit: int = 0,
        piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None,
    ):
        # rebuild the graph to only use edges from the arm and power requirements
        graph2 = nx.create_empty_copy(graph)
        for node, attrs in graph.nodes(data="payload"):
            for plat_arch in self.arches:
                deps = set().union(
                    *attrs.get(
                        f"{plat_arch}_requirements", attrs.get("requirements", {}),
                    ).values()
                )
                for dep in deps:
                    dep = graph.graph["outputs_lut"].get(dep, dep)
                    graph2.add_edge(dep, node)

        super().__init__(
            graph=graph2,
            pr_limit=pr_limit,
            check_solvable=False,
            piggy_back_migrations=piggy_back_migrations,
        )

        assert not self.check_solvable, "We don't want to check solvability for aarch!"
        # We are constraining the scope of this migrator
        with indir("../conda-forge-pinning-feedstock/recipe/migrations"), open(
            "arch_rebuild.txt", "r",
        ) as f:
            self.target_packages = set(f.read().split())

        self.name = name
        # filter the graph down to the target packages
        if self.target_packages:
            self.target_packages.add("python")  # hack that is ~harmless?
            packages = self.target_packages.copy()
            for target in self.target_packages:
                if target in self.graph.nodes:
                    packages.update(nx.ancestors(self.graph, target))
            self.graph.remove_nodes_from([n for n in self.graph if n not in packages])

        # filter out stub packages and ignored packages
        for node in list(self.graph.nodes):
            if (
                node.endswith("_stub")
                or (node.startswith("m2-"))
                or (node.startswith("m2w64-"))
                or (node in self.ignored_packages)
                or (
                    self.graph.nodes[node]
                    .get("payload", {})
                    .get("meta_yaml", {})
                    .get("build", {})
                    .get("noarch")
                )
            ):
                pluck(self.graph, node)
        self.graph.remove_edges_from(nx.selfloop_edges(self.graph))
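
Both migrator constructors above rely on the same idiom: nx.create_empty_copy keeps every node (with its payload) while discarding all edges, and only the edges implied by the platform-specific requirements are re-added. A stripped-down sketch of that idiom, with a hypothetical 'deps' node attribute standing in for the requirements lookup:

import networkx as nx

def rebuild_with_filtered_edges(graph, wanted):
    """Copy graph node-for-node, keeping only dependency edges found in wanted."""
    graph2 = nx.create_empty_copy(graph)  # same nodes and node data, zero edges
    for node, deps in graph.nodes(data="deps"):  # 'deps' is a hypothetical attribute
        for dep in set(deps or ()) & set(wanted):
            graph2.add_edge(dep, node)  # edge runs from dependency to dependent
    return graph2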
Code Example #26
0
File: PSOECHS.py Project: lee-jingu/SSMOECHS
def Optimizer(network, Alive_Node, Update=False, R=30, In_Median=30, First=False):
    PSO_NET = nx.create_empty_copy(network)
    PSO_CHID = []
    M = max(round(cf.P_CH*len(Alive_Node)),1)
    SN = 40
    MIR = 10
    CH = []
    v_arr = []
    x_arr = []
    RES_ARR = []
    FIT = []
    ## initializing
    for i in range(0,SN):
        choice = np.random.choice(Alive_Node,M,replace = False)
        choice_x = []
        choice_v = []
        for j in choice:
            x,y = PSO_NET.node[j]['pos']
            choice_x.append([x,y])
            choice_v.append([0,0])
        CH.append(choice)
        x_arr.append(choice_x)
        v_arr.append(choice_v)
        FIT.append(Get_Fitness(PSO_NET,choice,Alive_Node))
    v_arr = np.array(v_arr)
    x_arr = np.array(x_arr)    
    Gbest = np.where(np.min(FIT)==FIT)[0][0]
    w = np.array([0.9,0.9])
    ##update
    for Iter in range(0,MIR):
        PGD = x_arr[Gbest]
        for i in range(0,SN):
            if FIT[i] == FIT[Gbest]:
                continue
            for j in range(0,len(CH[i])):
                v_arr[i][j] =  w * v_arr[i][j] + 2*uniform(0,1)*(PGD[j] - x_arr[i][j])
                x_arr[i][j] += v_arr[i][j]
                NNDist = 10000
                NNID = 0
                x1,y1= x_arr[i][j]
                for T in Alive_Node:
                    x2,y2 = PSO_NET.node[T]['pos']
                    NewDist = math.sqrt((x1-x2)**2+(y1-y2)**2)
                    if NewDist < NNDist:
                        NNDist = NewDist
                        NNID = T
                CH[i][j] = NNID
            FIT[i] = Get_Fitness(PSO_NET,CH[i],Alive_Node)
            if FIT[i] < FIT[Gbest]:
                Gbest = i
        w = [w[0] - (0.9-0.4)/MIR, w[1] - (0.9-0.4)/MIR]


    PSO_CHID = CH[Gbest]
    for i in Alive_Node:
        if i in PSO_CHID:
            PSO_NET.node[i]['Next'] = 0
            continue
        NNDist = 1000
        CH_ARR = np.zeros(len(PSO_CHID))
        x,y = PSO_NET.node[i]['pos']
        COUNT_ARR = np.ones(len(PSO_CHID))
        for j in range(0,len(PSO_CHID)):
            if i == j:
                continue
            RES = PSO_NET.node[PSO_CHID[j]]['res_energy']
            x2,y2 = PSO_NET.node[PSO_CHID[j]]['pos']
            dist = math.sqrt((x-x2)**2+(y-y2)**2)
            dis2 = math.sqrt((x2-50)**2+(y2-50)**2)
            CH_ARR[j] = RES/(dist*dis2*COUNT_ARR[j])
        idx = np.where(CH_ARR == np.max(CH_ARR))[0][0]
        COUNT_ARR[idx] += 1
        PSO_NET.node[i]['Next'] = PSO_CHID[idx]

    if First:
        for i in Alive_Node:
            PSO_NET.add_edge(i,PSO_NET.node[i]['Next'])

    return PSO_NET, PSO_CHID, R, In_Median
Code Example #27
0
def emptygraph(G):
    sample = G.copy()
    sample = nx.create_empty_copy(sample, with_data=True)
    return sample
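
Note that create_empty_copy never mutates its argument, so the defensive G.copy() above is redundant; under that assumption the helper reduces to a one-liner (a sketch, not part of the original project):

import networkx as nx

def emptygraph(G):
    # create_empty_copy builds a fresh graph, leaving G untouched
    return nx.create_empty_copy(G, with_data=True)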
Code Example #28
0
def Optimizer(network,
              Alive_Node,
              Update=False,
              R=30,
              In_Median=30,
              First=False):
    SSMO_NET = nx.create_empty_copy(network)
    SSMO_CHID = []
    NB_Cluster = max(1, round(len(Alive_Node) * cf.P_CH))
    update = 0
    a = 0.5
    if Update:
        Rmax = 0
        for i in Alive_Node:
            x, y = SSMO_NET.node[i]['pos']
            R_tmp = math.sqrt(
                ((x - 50)**2 + (y - 50)**2)) / math.sqrt(NB_Cluster)
            if R_tmp > Rmax:
                Rmax = R_tmp

            if Rmax != R:
                R = Rmax
                update = 1

    if update == 1:
        for i in Alive_Node:
            SSMO_NET.node[i]['Cover'] = []
            for j in Alive_Node:
                if i == j:
                    continue
                x1, y1 = SSMO_NET.node[i]['pos']
                x2, y2 = SSMO_NET.node[j]['pos']
                D = math.sqrt((x1 - x2)**2 + (y1 - y2)**2)
                if D < R:
                    SSMO_NET.node[i]['Cover'].append(j)

    ## Initializing Phase
    SM_Arr = []
    MG = 5
    MIR = 100
    Swarm_Size = 40
    FIT = []
    FIT1 = []
    FIT2 = []
    MGLL = 10
    MLLL = 20
    Group0 = []
    Group1 = []
    Group2 = []
    Group3 = []

    for i in range(0, Swarm_Size):
        choice = np.random.choice(Alive_Node, NB_Cluster, replace=False)
        SM_Arr.append(choice)
        Group0.append(i)

    # NET_MAX = Get_MAX(SSMO_NET,SM_Arr,R,In_Median)
    NET_MAX = 0
    w1 = []
    w2 = []
    for i in range(0, Swarm_Size):
        f1, f2 = Get_Fitness(SSMO_NET, SM_Arr[i], R, In_Median, NET_MAX,
                             Alive_Node)
        FIT1.append(f1)
        w1.append(a)
        FIT2.append(f2)
        w2.append(1 - a)
    MAXFIT = np.max(FIT1)
    FIT1 = np.array(FIT1)
    FIT2 = np.array(FIT2)
    w1 = np.array(w1)
    w2 = np.array(w2)
    FIT = w1 * FIT1 / MAXFIT + w2 * FIT2

    Group = 1
    GLID = np.where(FIT == np.max(FIT))[0][0]
    LLID_arr = np.zeros(MG, dtype=np.int32)
    LLID_arr[0] = GLID
    LLL = np.zeros(MG, dtype=np.int32)
    Pr = 0.1
    GLL = 0
    for Iter in range(0, MIR):
        ## Local Leader Phase
        Pr += (0.4 - 0.1) / MIR
        for i in range(0, Group):
            if i == 0:
                temp = Group0
            if i == 1:
                temp = Group1
            if i == 2:
                temp = Group2
            if i == 3:
                temp = Group3

            LLID = LLID_arr[i]
            LLMAX = FIT[LLID]
            LMAX = FIT[LLID]
            MAXFIT = FIT[LLID]

            Prob_Arr = np.zeros(len(Alive_Node))
            for j in temp:
                if j in LLID_arr:
                    continue
                if j == GLID:
                    continue

                if random() < Pr:
                    LL = SM_Arr[LLID]
                    SM = SM_Arr[j]
                    Rand = np.random.choice(temp, 1)[0]
                    SMR = SM_Arr[Rand]
                    ARANGE = np.hstack([SM, LL, SMR])
                    b = uniform(0, 1)
                    d = uniform(-1, 1)
                    PROBSM = np.ones(len(SM)) * (1 - b - d)
                    PROBLL = np.ones(len(LL)) * (b)
                    PROBSMR = np.ones(len(SMR)) * (d)
                    Prob_Arr = np.hstack([PROBSM, PROBLL, PROBSMR])
                    Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                    choice = list(
                        set(
                            np.random.choice(ARANGE,
                                             NB_Cluster,
                                             replace=False,
                                             p=Prob_Arr / np.sum(Prob_Arr))))
                    SM_Arr[j] = choice
                    FIT1[j], FIT2[j] = Get_Fitness(SSMO_NET, choice, R,
                                                   In_Median, FIT[LLID],
                                                   Alive_Node)
                    FIT[j] = a * FIT1[j] / MAXFIT + (1 - a) * FIT2[j]
                    if LMAX < FIT[j]:
                        LMAX = FIT[j]
                        LLID_arr[i] = j
            if LLMAX == LMAX:
                LLL[i] += 1

            ## Local Leader Decision
            if LLL[i] == MLLL:
                LLL[i] = 0
                for j in temp:
                    if j in LLID_arr:
                        continue
                    if j == GLID:
                        continue
                    if random() < Pr:
                        LL = SM_Arr[LLID]
                        GL = SM_Arr[GLID]
                        SM = SM_Arr[j]
                        ARANGE = np.hstack([SM, LL, GL])
                        b = uniform(0, 1)
                        c = uniform(0.2, 1)
                        PROBSM = np.ones(len(SM)) * (1 - b - c)
                        PROBLL = np.ones(len(LL)) * (b)
                        PROBGL = np.ones(len(GL)) * (c)
                        Prob_Arr = np.hstack([PROBSM, PROBLL, PROBGL])
                        Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                        choice = list(
                            set(
                                np.random.choice(ARANGE,
                                                 NB_Cluster,
                                                 replace=False,
                                                 p=Prob_Arr /
                                                 np.sum(Prob_Arr))))
                    else:
                        choice = np.random.choice(Alive_Node,
                                                  NB_Cluster,
                                                  replace=False)
                    SM_Arr[j] = choice
                    FIT1[j], FIT2[j] = Get_Fitness(SSMO_NET, choice, R,
                                                   In_Median, FIT[LLID],
                                                   Alive_Node)
                    FIT[j] = a * FIT1[j] / MAXFIT + (1 - a) * FIT2[j]
                    if LMAX < FIT[j]:
                        LMAX = FIT[j]
                        LLID_arr[i] = j

        ## Global Leader Phase
        for i in range(0, Swarm_Size - 1):
            GGLMAX = FIT[GLID]
            GLMAX = FIT[GLID]
            if i == GLID:
                FIT1[i] = 1
                FIT[i] = FIT1[i] + FIT2[i]
                continue
            if i in LLID_arr:
                continue

            Prob = 0.9 * (FIT[i] / FIT[GLID]) + 0.1
            if Prob > random():
                GL = SM_Arr[GLID]
                SM = SM_Arr[i]
                Rand = np.random.choice(Group0, 1)[0]
                SMR = SM_Arr[Rand]
                ARANGE = np.hstack([SM, GL, SMR])
                b = uniform(0, 1)
                d = uniform(-1, 1)
                PROBSM = np.ones(len(SM)) * (1 - b - d)
                PROBGL = np.ones(len(GL)) * (b)
                PROBSMR = np.ones(len(SMR)) * (d)
                Prob_Arr = np.hstack([PROBSM, PROBGL, PROBSMR])
                Prob_Arr = np.exp(Prob_Arr) / np.sum(np.exp(Prob_Arr))
                choice = list(
                    set(
                        np.random.choice(ARANGE,
                                         NB_Cluster,
                                         replace=False,
                                         p=Prob_Arr / np.sum(Prob_Arr))))
                FIT1[i], FIT2[i] = Get_Fitness(SSMO_NET, choice, R, In_Median,
                                               FIT[LLID], Alive_Node)
                FIT[i] = a * FIT1[i] / MAXFIT + (1 - a) * FIT2[i]
                if FIT[i] > GLMAX:
                    GLMAX = FIT[i]
                    GLID = i
        if GLMAX == GGLMAX:
            GLL += 1

        ## Local Decision Phase
        # if LLL == MLLL:

        ## Global Decision Phase
        if GLL == MGLL:
            GLL = 0
            Group = min(Group + 1, MG - 1)
            Choice_Node = np.arange(0, Swarm_Size, 1)
            if Group == 2:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.array(Choice_Node)
            if Group == 3:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.array(Choice_Node)
            if Group == 4:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / Group),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group2))
                Group3 = np.array(Choice_Node)

    SSMO_CHID = SM_Arr[GLID]
    INNER = []
    OUTER = []
    RTBS = []
    for i in SSMO_CHID:
        RTBS.append(SSMO_NET.node[i]['RTBS'])
    CENTER = min(np.average(RTBS), cf.D_o)
    for i in Alive_Node:
        if i in SSMO_CHID:
            if network.node[i]['RTBS'] > CENTER:
                OUTER.append(i)
                continue
            else:
                INNER.append(i)
                SSMO_NET.node[i]['Next'] = 0
                continue
        x1, y1 = SSMO_NET.node[i]['pos']
        NNID = 0
        NN_Dist = 1000
        for NN in SSMO_CHID:
            x2, y2 = SSMO_NET.node[NN]['pos']
            new_dist = math.sqrt((x1 - x2)**2 + (y1 - y2)**2)
            if new_dist < NN_Dist:
                NNID = NN
                NN_Dist = new_dist
        SSMO_NET.node[i]['Next'] = NNID

    for i in OUTER:
        NNID = 0
        NN = SSMO_NET.node[i]['RTBS']
        x, y = SSMO_NET.node[i]['pos']
        for j in INNER:
            x2, y2 = SSMO_NET.node[j]['pos']
            Dist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if Dist < NN:
                NNID = j
                NN = Dist
        SSMO_NET.node[i]['Next'] = NNID

    if First:
        ## add_Edge
        for i in Alive_Node:
            if i in SSMO_CHID:
                continue
            SSMO_NET.add_edge(i, SSMO_NET.node[i]['Next'])

    return SSMO_NET, SSMO_CHID, R, In_Median
Code Example #29
0
File: BSMO.py Project: lee-jingu/SSMOECHS
def Optimizer(network,
              Alive_Node,
              Update=False,
              R=30,
              In_Median=30,
              First=False,
              a=False):
    BSMO_NET = nx.create_empty_copy(network)
    BSMO_CHID = []
    Swarm_Size = 40
    MIR = 100
    NB_Cluster = max(round(cf.P_CH * len(Alive_Node)), 1)
    # Xmax = max(cf.AREA_W,cf.SINK_X)
    # Ymax = max(cf.AREA_H,cf.SINK_Y)
    if Update:
        Rmax = 0
        R_tmp = []
        for i in Alive_Node:
            x, y = BSMO_NET.node[i]['pos']
            R_tmp.append(BSMO_NET.node[i]['RTBS'])
        Rmax = np.max(R_tmp) / (cf.D_o * 1.1)
        if Rmax != R:
            R = Rmax

    ##Initializing
    SM_Arr = []
    FIT = []
    MG = 5
    Group0 = []
    Group1 = []
    Group2 = []
    Group3 = []
    NGroup = 1
    LLL = np.zeros(MG)
    GLL = 0
    MLLL = 20
    MGLL = 10

    for i in range(0, Swarm_Size):
        SM = np.zeros(cf.N_NODE, dtype=np.int32)
        choice = np.random.choice(Alive_Node, NB_Cluster, replace=False) - 1
        for j in choice:
            SM[j] = 1

        SM_Arr.append(SM)
        FIT.append(Get_Fitness(BSMO_NET, SM, Alive_Node, R))
        Group0.append(i)

    Pr = 0.1
    LLID = np.where(np.max(FIT) == FIT)[0][0]
    GLID = np.where(np.max(FIT) == FIT)[0][0]

    for Iter in range(0, MIR):
        ## Local Leader Phase
        Pr = Pr + (0.4 - 0.1) / MIR
        for i in range(0, MG):
            if i == 0:
                temp = Group0
            if i == 1:
                temp = Group1
            if i == 2:
                temp = Group2
            if i == 3:
                temp = Group3

            ## find LLID
            MAXFIT = 0
            count = 0
            for ID in temp:
                TMPFIT = FIT[ID]
                if TMPFIT > MAXFIT:
                    LLID = ID
                    MAXFIT = TMPFIT

            for j in temp:
                if FIT[j] == FIT[LLID]:
                    continue
                if FIT[j] == FIT[GLID]:
                    continue
                if Pr > random():
                    SM = SM_Arr[j]
                    LL = SM_Arr[LLID]
                    Rand = np.random.choice(temp, 1)[0]
                    SMR = SM_Arr[Rand]
                    b = randint(0, 1)
                    d = randint(-1, 1)
                    SM_Arr[j] = np.bitwise_xor(
                        SM,
                        np.bitwise_or(
                            np.bitwise_and(b, np.bitwise_xor(LL, SM)),
                            np.bitwise_and(d, np.bitwise_xor(SMR, SM))))
                    FIT[j] = Get_Fitness(BSMO_NET, SM_Arr[j], Alive_Node, R)
                if FIT[j] > FIT[LLID]:
                    count = 1
                    LLIDPOT = j
            if count == 0:
                LLL[i] += 1
            else:
                count = 0
                LLID = LLIDPOT

            ## Local Leader Decision
            if LLL[i] == MLLL:
                LLL[i] = 0
                for TT in temp:
                    if FIT[TT] == FIT[LLID]:
                        continue
                    if FIT[TT] == FIT[GLID]:
                        continue
                    if Pr > random():
                        SM = SM_Arr[TT]
                        LL = SM_Arr[LLID]
                        GL = SM_Arr[GLID]
                        b = randint(0, 1)
                        SM_Arr[TT] = np.bitwise_xor(
                            SM,
                            np.bitwise_or(
                                np.bitwise_and(b, np.bitwise_xor(LL, SM)),
                                np.bitwise_and(b, np.bitwise_xor(GL, SM))))
                        FIT[TT] = Get_Fitness(BSMO_NET, SM_Arr[TT], Alive_Node,
                                              R)

                    else:
                        SM = np.zeros(cf.N_NODE, dtype=np.int32)
                        choice = np.random.choice(
                            Alive_Node, NB_Cluster, replace=False) - 1
                        for KJ in choice:
                            SM[KJ] = 1
                        SM_Arr[TT] = SM
                        FIT[TT] = Get_Fitness(BSMO_NET, SM_Arr[TT], Alive_Node,
                                              R)

        ## Global Leader Phase
        count = 0
        GLID = np.where(np.max(FIT) == FIT)[0][0]
        for i in range(0, len(SM_Arr)):
            if FIT[i] == FIT[GLID]:
                continue
            Prob = 0.9 * (FIT[i] / FIT[GLID]) + 0.1
            if Prob > random():
                GL = SM_Arr[GLID]
                SM = SM_Arr[i]
                Rand = randint(0, Swarm_Size - 1)
                SMR = SM_Arr[Rand]
                b = randint(0, 1)
                d = randint(-1, 1)
                SM_Arr[i] = np.bitwise_xor(
                    SM,
                    np.bitwise_or(np.bitwise_and(b, np.bitwise_xor(GL, SM)),
                                  np.bitwise_and(d, np.bitwise_xor(SMR, SM))))
                FIT[i] = Get_Fitness(BSMO_NET, SM_Arr[i], Alive_Node, R)
                if FIT[i] > FIT[GLID]:
                    count = 1
        if count == 0:
            GLL += 1
        else:
            count = 0
            GLID = np.where(np.max(FIT) == FIT)[0][0]

        ## Global Decision
        if GLL == MGLL:
            GLL = 0
            NGroup = min(NGroup + 1, MG - 1)
            Choice_Node = np.arange(0, Swarm_Size, 1)
            if NGroup == 2:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.array(Choice_Node)
            if NGroup == 3:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.array(Choice_Node)
            if NGroup == 4:
                Group0 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.random.choice(Choice_Node,
                                          int(Swarm_Size / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group2))
                Group3 = np.array(Choice_Node)

    INNER = []
    OUTER = []
    RTBS = []
    GLID = np.where(np.max(FIT) == FIT)[0][0]
    BSMO_CHID = np.where(SM_Arr[GLID] == np.max(SM_Arr[GLID]))[0] + 1

    for i in BSMO_CHID:
        RTBS.append(BSMO_NET.node[i]['RTBS'])

    ## modified for topology
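    # Split the cluster heads on their 'RTBS' attribute (presumably each
    # node's range to the base station): heads below the average transmit
    # straight to the sink (Next = 0), the rest join OUTER and later relay
    # through the nearest INNER head.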
    MED = np.average(RTBS)
    for i in BSMO_CHID:
        if BSMO_NET.node[i]['RTBS'] < MED:
            INNER.append(i)
            BSMO_NET.node[i]['Next'] = 0
        else:
            OUTER.append(i)

    for i in Alive_Node:
        if i in BSMO_CHID:
            continue
        x, y = BSMO_NET.node[i]['pos']
        NNDist = 1000
        NNID = 0
        for j in BSMO_CHID:
            x2, y2 = BSMO_NET.node[j]['pos']
            NewDist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if NNDist > NewDist:
                NNID = j
                NNDist = NewDist
        BSMO_NET.node[i]['Next'] = NNID

    for i in OUTER:
        NNID = 0
        NNDist = BSMO_NET.node[i]['RTBS']
        x, y = BSMO_NET.node[i]['pos']
        for j in INNER:
            x2, y2 = BSMO_NET.node[j]['pos']
            NewDist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if NNDist > NewDist:
                NNID = j
                NNDist = NewDist
        BSMO_NET.node[i]['Next'] = NNID

    if First == True:
        ## add_Edge
        for i in Alive_Node:
            NEXT = BSMO_NET.node[i]['Next']
            if NEXT != 0:
                BSMO_NET.add_edge(i, NEXT)

    return BSMO_NET, BSMO_CHID, R, In_Median
Code example #30
File: datasets.py  Project: isaachenrion/gcn
    def __init__(self, graph_dataset, batch_size):
        self.flat_graph_state_dim = graph_dataset.flat_graph_state_dim
        self.model_graph = nx.create_empty_copy(graph_dataset.graphs[0])

        super().__init__(graph_dataset, batch_size)
Code example #31
In addition to constructing graphs node-by-node or edge-by-edge,
they can also be generated by:
## 1. Applying classic graph operations, such as:

nx.subgraph(G, nbunch)  # induced subgraph view of G on nodes in nbunch
G.subgraph([1, 2, 3, 5]).edges

G3 = nx.union(G1, G2)  # graph union
G3.edges

nx.disjoint_union(G1, G2)  # graph union assuming all nodes are different
nx.cartesian_product(G1, G2)  # return Cartesian product graph
nx.compose(G1, G2)  # combine graphs identifying nodes common to both
nx.complement(G)  # graph complement
nx.create_empty_copy(G)  # return an empty copy of the same graph class
nx.to_undirected(G)  # return an undirected representation of G
nx.to_directed(G)  # return a directed representation of G

## 2. Using a call to one of the classic small graphs, e.g.,

petersen = nx.petersen_graph()
tutte = nx.tutte_graph()
maze = nx.sedgewick_maze_graph()
tet = nx.tetrahedral_graph()

## 3. Using a (constructive) generator for a classic graph, e.g.,

K_5 = nx.complete_graph(5)
K_3_5 = nx.complete_bipartite_graph(3, 5)
barbell = nx.barbell_graph(10, 10)
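
As a quick sanity check of create_empty_copy from the list above, here is
a minimal sketch on a standard path graph:

import networkx as nx

G = nx.path_graph(4)           # nodes 0-3 joined by three edges
H = nx.create_empty_copy(G)    # same nodes and graph class, no edges
list(H.nodes())                # [0, 1, 2, 3]
list(H.edges())                # []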
Code example #32
def make_graph(
    names: List[str], gx: Optional[nx.DiGraph] = None, mark_not_archived=False,
) -> nx.DiGraph:
    logger.info("reading graph")

    if gx is None:
        gx = nx.DiGraph()

    new_names = [name for name in names if name not in gx.nodes]
    old_names = [name for name in names if name in gx.nodes]
    # silly typing force
    assert gx is not None
    old_names = sorted(  # type: ignore
        old_names, key=lambda n: gx.nodes[n].get("time", 0),
    )  # type: ignore

    total_names = new_names + old_names
    logger.info("start feedstock fetch loop")
    from .xonsh_utils import env

    debug = env.get("CONDA_FORGE_TICK_DEBUG", False)
    builder = _build_graph_sequential if debug else _build_graph_process_pool
    builder(gx, total_names, new_names, mark_not_archived=mark_not_archived)
    logger.info("feedstock fetch loop completed")

    gx2 = deepcopy(gx)
    logger.info("inferring nodes and edges")

    # make the outputs look up table so we can link properly
    outputs_lut = {
        k: node_name
        for node_name, node in gx.nodes.items()
        for k in node.get("payload", {}).get("outputs_names", [])
    }
    # add this as an attr so we can use later
    gx.graph["outputs_lut"] = outputs_lut
    strong_exports = {
        node_name
        for node_name, node in gx.nodes.items()
        if node.get("payload").get("strong_exports", False)
    }
    # This drops all the edge data and only keeps the node data
    gx = nx.create_empty_copy(gx)
    # TODO: label these edges with the kind of dep they are and their platform
    for node, node_attrs in gx2.nodes.items():
        with node_attrs["payload"] as attrs:
            # replace output package names with feedstock names via LUT
            deps = set(
                map(
                    lambda x: outputs_lut.get(x, x),
                    set().union(*attrs.get("requirements", {}).values()),
                ),
            )

            # handle strong run exports
            overlap = deps & strong_exports
            requirements = attrs.get("requirements")
            if requirements:
                requirements["host"].update(overlap)
                requirements["run"].update(overlap)

        for dep in deps:
            if dep not in gx.nodes:
                # for packages which aren't feedstocks and aren't outputs
                # usually these are stubs
                lzj = LazyJson(f"node_attrs/{dep}.json")
                lzj.update(feedstock_name=dep, bad=False, archived=True)
                gx.add_node(dep, payload=lzj)
            gx.add_edge(dep, node)
    logger.info("new nodes and edges inferred")
    return gx
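
The drop-and-rebuild pattern at the heart of make_graph can be isolated
into a short sketch; deps_of below is a hypothetical callback standing in
for the requirements/outputs_lut machinery above, not part of the original
code:

import networkx as nx

def rebuild_edges(gx: nx.DiGraph, deps_of) -> nx.DiGraph:
    old = gx
    gx = nx.create_empty_copy(old)  # keeps nodes and their data, drops edges
    for node in old.nodes:
        for dep in deps_of(node):  # hypothetical: node -> dependency names
            if dep not in gx.nodes:
                gx.add_node(dep)  # stub node for an unknown dependency
            gx.add_edge(dep, node)  # edges point dependency -> dependent
    return gx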
Code example #33
def mwrc_approx(G):
    """
    Approximate a minimum routing cost spanning tree of G using
    Campos's heuristic:
    https://doi.org/10.1016/j.comnet.2008.08.013
    """

    if G.number_of_edges() <= 1:
        return G

    C_1, C_2, C_3 = .2, .2, .6
    nodes = G.nodes()
    node_id = {node: i for i, node in enumerate(nodes)}
    id_node = {i: node for i, node in enumerate(nodes)}
    #
    # Compute Degree and Weights for each vertex
    #
    d = [0 for _ in nodes]  # degree
    s = [0 for _ in nodes]  # sum of adjacent edge
    m = [0 for _ in nodes]  # max of adjacent edges
    total_weight = 0  # total_weight
    for edge in G.edges():
        i, j = node_id[edge[0]], node_id[edge[1]]

        w = G[edge[0]][edge[1]]['weight']
        d[i] += 1
        s[i] += w
        m[i] = max(m[i], w)
        d[j] += 1
        s[j] += w
        m[j] = max(m[j], w)
        total_weight += w

    w = [101 for _ in nodes]  # weight
    cf = [100001 for _ in nodes]  # estimated cost to f
    sp = [0 for _ in nodes]  # spanning potential
    sp_max = 0  # max spanning potential
    f = 0
    p = [None for _ in nodes]  # parents
    pd = [None for _ in nodes]  # parent degree
    ps = [None for _ in nodes]  # sum of parent neighbor weight

    #
    # Calculate Mean and StdDev and set C_4, C_5
    #
    mean = total_weight / G.number_of_edges()
    tot = 0
    for edge in G.edges():
        tot = tot + (G[edge[0]][edge[1]]['weight'] - mean)**2
    std = math.sqrt(tot / (G.number_of_edges() - 1))
    ratio = std / mean
    C_4, C_5 = 0, 0
    if ratio < .4 + .005 * (len(nodes) - 10):
        C_4, C_5 = 1, 1
    else:
        C_4, C_5 = .9, .1

    #
    # Select highest sp as initial vertex
    #
    for un_node in nodes:
        node = node_id[un_node]
        sp[node] = C_1 * d[node] + C_2 * d[node] / s[node] + C_3 / m[node]
        if sp[node] > sp_max:
            f = node
            sp_max = sp[node]

    w[f] = 0
    cf[f] = 0
    p[f], pd[f], ps[f] = f, 0, 1
    track = [None for _ in nodes]

    L = set([f])  # initialize set L
    T = set()
    track[f] = 1  # 1 means f is in L, 2 means f is in T
    num_spanned = 0
    wd = [1000001 for _ in nodes]
    jsp = [0 for _ in nodes]
    wd[f], jsp[f] = C_4 * w[f] + C_5 * cf[f], d[f] + pd[f] + (d[f] + pd[f]) / (
        s[f] + ps[f])

    while num_spanned < len(nodes):

        wd_min, jsp_max = 100001, 0
        u = 0

        for node in L:
            if wd[node] < wd_min:
                S = set([node])
                wd_min = wd[node]
            elif wd[node] == wd_min:
                S.add(node)

        for node in S:
            if jsp[node] >= jsp_max:
                jsp_max = jsp[node]
                u = id_node[node]
        L.remove(node_id[u])

        u_id = node_id[u]  # needed below even when u has no unvisited neighbors
        for neighbor in nx.all_neighbors(G, u):
            v = node_id[neighbor]
            if track[v] == 2:  # 2 means in T already
                continue
            uv_weight = G[u][neighbor]['weight']
            wd_new = C_4 * uv_weight + C_5 * (cf[u_id] + uv_weight)
            dv, du = d[v], d[u_id]
            jsp_new = dv + du + (dv + du) / (s[v] + s[u_id])
            if wd_new < wd[v]:
                wd[v] = wd_new
                jsp[v] = jsp_new
                p[v] = u_id
                cf[v] = cf[u_id] + G[u][neighbor]['weight']
                pd[v] = d[u_id]
                ps[v] = s[u_id]
            elif (wd_new == wd[v] and jsp_new >= jsp[v]):
                jsp[v] = jsp_new
                p[v] = u_id
                cf[v] = cf[u_id] + G[u][neighbor]['weight']
                pd[v] = d[u_id]
                ps[v] = s[u_id]

            if track[v] is None:
                L.add(v)
                track[v] = 1

        track[u_id] = 2
        num_spanned += 1
        par = id_node[p[u_id]]
        if par != u:
            T.add((u, par, G[u][par]['weight']))

    tree = nx.create_empty_copy(G)
    tree.add_weighted_edges_from(list(T))
    return tree
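
A minimal usage sketch (the toy graph and weights are illustrative only):
mwrc_approx expects an undirected graph whose edges carry a 'weight'
attribute and returns a spanning tree over the same nodes:

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([
    ('a', 'b', 2.0), ('b', 'c', 1.0),
    ('a', 'c', 3.0), ('c', 'd', 1.5),
])
T = mwrc_approx(G)
print(nx.is_tree(T))                   # expected True for connected input
print(sorted(T.edges(data='weight')))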
Code example #34
#**************
#! NETWORK
#**************
years = range(1976, 2016)

G = setup_G(df3, "src", "dst")

tris, citations, triads = triads_and_cits(active, G, years)
shares = {
    year: x / y
    for year, x, y in zip(years, tris.values(), citations.values())
}

active_disc = active[(active["technology_src"] == "discrete")
                     & (active["technology_dst"] == "discrete")]
G = nx.create_empty_copy(G)
tris_disc, cits_disc, triads_disc = triads_and_cits(active_disc, G, years)
shares_disc = {
    year: x / y
    for year, x, y in zip(years, tris_disc.values(), citations.values())
}

active_com = active[(active["technology_src"] == "complex")
                    & (active["technology_dst"] == "complex")]
G = nx.create_empty_copy(G)
tris_com, cits_com, triads_com = triads_and_cits(active_com, G, years)
shares_com = {
    year: x / y
    for year, x, y in zip(years, tris_com.values(), citations.values())
}
Code example #35
def Optimizer(network,
              Alive_Node,
              Update=False,
              R=30,
              In_Median=30,
              First=False):
    BSMO_NET = nx.create_empty_copy(network)
    BSMO_CHID = []
    Swarm_Size = 40
    MIR = 100

    if Update == True:
        MAX_X = 0
        MAX_Y = 0
        for i in Alive_Node:
            x, y = BSMO_NET.node[i]['pos']
            if x > MAX_X:
                MAX_X = x
            if y > MAX_Y:
                MAX_Y = y

        R = math.sqrt(MAX_X**2 + MAX_Y**2) / 4

    ## Initializing
    SM_Arr = []
    FIT = []
    MG = 4
    Group0 = []
    Group1 = []
    Group2 = []
    Group3 = []
    NGroup = 1
    LLL = np.zeros(MG)
    GLL = 0
    MLLL = 10
    MGLL = 20
    NB_Cluster = max(round(cf.P_CH * len(Alive_Node)), 1)
    for i in range(0, Swarm_Size):
        SM = []
        for j in Alive_Node:
            if random() <= cf.P_CH:
                SM.append(1)
            else:
                SM.append(0)
        SM_Arr.append(SM)
        FIT.append(Get_Fitness(BSMO_NET, SM, Alive_Node))
        Group0.append(i)

    Pr = 0.1
    LLID = np.where(np.max(FIT) == FIT)[0][0]
    GLID = np.where(np.max(FIT) == FIT)[0][0]

    for Iter in range(0, MIR):
        ## Local Leader Phase
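        # (Binary position update: b in {0, 1} gates attraction toward the
        # local leader and d in {-1, 0, 1} gates a random-member
        # perturbation; bitwise_and with -1 or 1 keeps the bits and with 0
        # drops them, so XOR/AND/OR replace the continuous SMO update.)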
        Pr = Pr + (0.4 - 0.1) / MIR
        for i in range(0, MG):
            if i == 0:
                temp = Group0
            if i == 1:
                temp = Group1
            if i == 2:
                temp = Group2
            if i == 3:
                temp = Group3

            ## find LLID
            MAXFIT = 0
            count = 0
            for ID in temp:
                TMPFIT = FIT[ID]
                if TMPFIT > MAXFIT:
                    LLID = ID
                    MAXFIT = TMPFIT

            for j in temp:
                if FIT[j] == FIT[LLID]:
                    continue
                if FIT[j] == FIT[GLID]:
                    continue
                if Pr > random():
                    SM = SM_Arr[j]
                    LL = SM_Arr[LLID]
                    Rand = np.random.choice(temp, 1)[0]
                    SMR = SM_Arr[Rand]
                    b = randint(0, 1)
                    d = randint(-1, 1)
                    SM_Arr[j] = np.bitwise_xor(
                        SM,
                        np.bitwise_or(
                            np.bitwise_and(b, np.bitwise_xor(LL, SM)),
                            np.bitwise_and(d, np.bitwise_xor(SMR, SM))))
                    FIT[j] = Get_Fitness(BSMO_NET, SM_Arr[j], Alive_Node)
                if FIT[j] > FIT[LLID]:
                    count = 1
                    LLIDPOT = j
            if count == 0:
                LLL[i] += 1
            else:
                count = 0
                LLID = LLIDPOT

            ## Local Leader Decision
            if LLL[i] == MLLL:
                LLL[i] = 0
                for TT in temp:
                    if FIT[TT] == FIT[LLID]:
                        continue
                    if FIT[TT] == FIT[GLID]:
                        continue
                    if Pr > random():
                        SM = SM_Arr[TT]
                        LL = SM_Arr[LLID]
                        GL = SM_Arr[GLID]
                        b = randint(0, 1)
                        SM_Arr[TT] = np.bitwise_xor(
                            SM,
                            np.bitwise_or(
                                np.bitwise_and(b, np.bitwise_xor(LL, SM)),
                                np.bitwise_and(b, np.bitwise_xor(GL, SM))))
                        FIT[TT] = Get_Fitness(BSMO_NET, SM_Arr[TT], Alive_Node)

                    else:
                        SM = []
                        # re-randomize this monkey as a fresh 0/1 bit vector
                        for KT in Alive_Node:
                            if random() < cf.P_CH:
                                SM.append(1)
                            else:
                                SM.append(0)
                        SM_Arr[TT] = SM
                        FIT[TT] = Get_Fitness(BSMO_NET, SM_Arr[TT], Alive_Node)

        ## Global Leader Phase
        count = 0
        GLID = np.where(np.max(FIT) == FIT)[0][0]
        for i in range(0, len(SM_Arr)):
            if FIT[i] == FIT[GLID]:
                continue
            Prob = 0.9 * (FIT[i] / FIT[GLID]) + 0.1
            if Prob > random():
                GL = SM_Arr[GLID]
                SM = SM_Arr[i]
                Rand = randint(0, Swarm_Size - 1)
                SMR = SM_Arr[Rand]
                b = randint(0, 1)
                d = randint(-1, 1)
                SM_Arr[i] = np.bitwise_xor(
                    SM,
                    np.bitwise_or(np.bitwise_and(b, np.bitwise_xor(GL, SM)),
                                  np.bitwise_and(d, np.bitwise_xor(SMR, SM))))
                FIT[i] = Get_Fitness(BSMO_NET, SM_Arr[i], Alive_Node)
                if FIT[i] > FIT[GLID]:
                    count = 1
        if count == 0:
            GLL += 1
        else:
            count = 0
            GLID = np.where(np.max(FIT) == FIT)[0][0]

        ## Global Decision
        if GLL == MGLL:
            GLL = 0
            NGroup += 1
            Choice_Node = np.arange(0, Swarm_Size, 1)
            if NGroup == 2:
                Group0 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.array(Choice_Node)
            if NGroup == 3:
                Group0 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.array(Choice_Node)
            if NGroup == 4:
                Group0 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group0))
                Group1 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group1))
                Group2 = np.random.choice(Choice_Node,
                                          int(len(Choice_Node) / NGroup),
                                          replace=False)
                Choice_Node = list(set(Choice_Node) - set(Group2))
                Group3 = np.array(Choice_Node)
            if NGroup == 5:
                BSMO_CHID = SM_Arr[GLID]

    INNER = []
    OUTER = []
    BSMO_CHID = np.where(SM_Arr[GLID] == np.max(SM_Arr[GLID]))[0] + 1
    for i in BSMO_CHID:
        if BSMO_NET.node[i]['RTBS'] < R:
            INNER.append(i)
            BSMO_NET.node[i]['Next'] = 0
        else:
            OUTER.append(i)

    for i in Alive_Node:
        if i in BSMO_CHID:
            continue
        x, y = BSMO_NET.node[i]['pos']
        NNDist = 1000
        NNID = 0
        for j in BSMO_CHID:
            if i == j:
                continue
            x2, y2 = BSMO_NET.node[j]['pos']
            NewDist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if NNDist > NewDist:
                NNID = j
                NNDist = NewDist
        BSMO_NET.node[i]['Next'] = NNID

    for i in OUTER:
        NNID = 0
        NNDist = 1000
        x, y = BSMO_NET.node[i]['pos']
        for j in INNER:
            x2, y2 = BSMO_NET.node[j]['pos']
            NewDist = math.sqrt((x - x2)**2 + (y - y2)**2)
            if NNDist > NewDist:
                NNID = j
                NNDist = NewDist
        BSMO_NET.node[i]['Next'] = NNID

    if First == True:
        ## add_Edge
        for i in Alive_Node:
            NEXT = BSMO_NET.node[i]['Next']
            if NEXT != 0:  # Next == 0 means the base station, not a node
                BSMO_NET.add_edge(i, NEXT)

    return BSMO_NET, BSMO_CHID, R
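
A hypothetical driver for this Optimizer, shown only as a sketch: it
assumes a cf module exposing P_CH, a Get_Fitness(network, SM, alive_nodes)
implementation (neither appears in this snippet), and node ids starting at
1, as the BSMO_CHID arithmetic above expects:

import math
from random import uniform

import networkx as nx

NET = nx.Graph()
for i in range(1, 101):  # node ids 1..100
    x, y = uniform(0, 100), uniform(0, 100)
    NET.add_node(i, pos=(x, y),
                 RTBS=math.sqrt(x ** 2 + y ** 2),  # distance to a sink at (0, 0)
                 Next=0)

alive = list(NET.nodes())
net, ch_ids, radius = Optimizer(NET, alive, Update=True, First=True)
print("cluster heads:", ch_ids)
print("relay edges:", list(net.edges()))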