Example #1
 def test_push(self):
     to_push = [6, 1, 4, 3, 2, 5, 0]
     h_sifted = [0, 2, 1, 6, 3, 5, 4]
     q = MappedQueue()
     for elt in to_push:
         q.push(elt)
     assert_equal(q.h, h_sifted)
     self._check_map(q)
Example #2
 def test_push(self):
     to_push = [6, 1, 4, 3, 2, 5, 0]
     h_sifted = [0, 2, 1, 6, 3, 5, 4]
     q = MappedQueue()
     for elt in to_push:
         q.push(elt, priority=elt)
     assert q.heap == h_sifted
     self._check_map(q)
Example #3
 def test_push_duplicate(self):
     to_push = [2, 1, 0]
     h_sifted = [0, 2, 1]
     q = MappedQueue()
     for elt in to_push:
         inserted = q.push(elt)
         assert_equal(inserted, True)
     assert_equal(q.h, h_sifted)
     self._check_map(q)
     inserted = q.push(1)
     assert_equal(inserted, False)
Example #4
 def test_push_duplicate(self):
     to_push = [2, 1, 0]
     h_sifted = [0, 2, 1]
     q = MappedQueue()
     for elt in to_push:
         inserted = q.push(elt, priority=elt)
         assert inserted
     assert q.heap == h_sifted
     self._check_map(q)
     inserted = q.push(1, priority=1)
     assert not inserted
Example #5
 def test_push_duplicate(self):
     to_push = [2, 1, 0]
     h_sifted = [0, 2, 1]
     q = MappedQueue()
     for elt in to_push:
         inserted = q.push(elt)
         assert inserted
     assert q.h == h_sifted
     self._check_map(q)
     inserted = q.push(1)
     assert not inserted
Example #6
 def _make_mapped_queue(self, h):
     q = MappedQueue()
     q.heap = h
     q.position = {elt: pos for pos, elt in enumerate(h)}
     return q
Example #7
 def test_len(self):
     h = [5, 4, 3, 2, 1, 0]
     q = MappedQueue(h)
     self._check_map(q)
     assert_equal(len(q), 6)
Example #8
from collections import defaultdict

from networkx.utils.mapped_queue import MappedQueue


def greedy_modularity_communities(G,
                                  weight=None,
                                  resolution=1,
                                  n_communities=1):
    r"""Find communities in G using greedy modularity maximization.

    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists or until the number of communities `n_communities` is reached.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight.  If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    resolution : float (default=1)
        If resolution is less than 1, modularity favors larger communities.
        Greater than 1 favors smaller communities.

    n_communities : int
        Desired number of communities: the community merging process is
        terminated once this number of communities is reached, or until
        modularity cannot be further increased. Must be between 1 and the
        total number of nodes in `G`. Default is ``1``, meaning the community
        merging process continues until all nodes are in the same community
        or until the best community structure is found.

    Returns
    -------
    partition : list
        A list of frozensets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> from networkx.algorithms.community import greedy_modularity_communities
    >>> G = nx.karate_club_graph()
    >>> c = greedy_modularity_communities(G)
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    .. [4] Newman, M. E. J."Analysis of weighted networks"
       Physical Review E 70(5 Pt 2):056131, 2004.
    """
    directed = G.is_directed()
    N = G.number_of_nodes()
    if (n_communities < 1) or (n_communities > N):
        raise ValueError(
            f"n_communities must be between 1 and {N}. Got {n_communities}")

    # Count edges (or the sum of edge-weights for weighted graphs)
    m = G.size(weight)
    q0 = 1 / m

    # Calculate degrees (notation from the papers)
    # a : the fraction of (weighted) out-degree for each node
    # b : the fraction of (weighted) in-degree for each node
    if directed:
        a = {
            node: deg_out * q0
            for node, deg_out in G.out_degree(weight=weight)
        }
        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
    else:
        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}

    # this preliminary step collects the edge weights for each node pair
    # It handles multigraph and digraph and works fine for graph.
    dq_dict = defaultdict(lambda: defaultdict(float))
    for u, v, wt in G.edges(data=weight, default=1):
        if u == v:
            continue
        dq_dict[u][v] += wt
        dq_dict[v][u] += wt

    # now scale and subtract the expected edge-weights term
    for u, nbrdict in dq_dict.items():
        for v, wt in nbrdict.items():
            dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v])

    # Use -dq to get a max_heap instead of a min_heap
    # dq_heap holds a heap for each node's neighbors
    dq_heap = {
        u: MappedQueue({(u, v): -dq
                        for v, dq in dq_dict[u].items()})
        for u in G
    }
    # H -> all_dq_heap holds a heap with the best items for each node
    H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0])

    # Initialize single-node communities
    communities = {n: frozenset([n]) for n in G}

    # Merge communities until we can't improve modularity or until desired number of
    # communities (n_communities) is reached.
    while len(H) > n_communities:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            negdq, u, v = H.pop()
        except IndexError:
            break
        dq = -negdq
        # Remove best merge from row u heap
        dq_heap[u].pop()
        # Push new row max onto H
        if len(dq_heap[u]) > 0:
            H.push(dq_heap[u].heap[0])
        # If this element was also at the root of row v, we need to remove the
        # duplicate entry from H
        if dq_heap[v].heap[0] == (v, u):
            H.remove((v, u))
            # Remove best merge from row v heap
            dq_heap[v].remove((v, u))
            # Push new row max onto H
            if len(dq_heap[v]) > 0:
                H.push(dq_heap[v].heap[0])
        else:
            # Duplicate wasn't in H, just remove from row v heap
            dq_heap[v].remove((v, u))
        # Stop when change is non-positive (no improvement possible)
        if dq <= 0:
            break

        # Perform merge
        communities[v] = frozenset(communities[u] | communities[v])
        del communities[u]

        # Get neighbor communities connected to the merged communities
        u_nbrs = set(dq_dict[u])
        v_nbrs = set(dq_dict[v])
        all_nbrs = (u_nbrs | v_nbrs) - {u, v}
        both_nbrs = u_nbrs & v_nbrs
        # Update dq for merge of u into v
        for w in all_nbrs:
            # Calculate new dq value
            if w in both_nbrs:
                dq_vw = dq_dict[v][w] + dq_dict[u][w]
            elif w in v_nbrs:
                dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] +
                                                      a[w] * b[u])
            else:  # w in u_nbrs
                dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] +
                                                      a[w] * b[v])
            # Update rows v and w
            for row, col in [(v, w), (w, v)]:
                dq_heap_row = dq_heap[row]
                # Update dict for v,w only (u is removed below)
                dq_dict[row][col] = dq_vw
                # Save old max of per-row heap
                if len(dq_heap_row) > 0:
                    d_oldmax = dq_heap_row.heap[0]
                else:
                    d_oldmax = None
                # Add/update heaps
                d = (row, col)
                d_negdq = -dq_vw
                # Save old value for finding heap index
                if w in v_nbrs:
                    # Update existing element in per-row heap
                    dq_heap_row.update(d, d, priority=d_negdq)
                else:
                    # We're creating a new nonzero element, add to heap
                    dq_heap_row.push(d, priority=d_negdq)
                # Update heap of row maxes if necessary
                if d_oldmax is None:
                    # No entries previously in this row, push new max
                    H.push(d, priority=d_negdq)
                else:
                    # We've updated an entry in this row, has the max changed?
                    row_max = dq_heap_row.heap[0]
                    if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
                        H.update(d_oldmax, row_max)

        # Remove row/col u from dq_dict matrix
        for w in dq_dict[u]:
            # Remove from dict
            dq_old = dq_dict[w][u]
            del dq_dict[w][u]
            # Remove from heaps if we haven't already
            if w != v:
                # Remove both row and column
                for row, col in [(w, u), (u, w)]:
                    dq_heap_row = dq_heap[row]
                    # Check if replaced dq is row max
                    d_old = (row, col)
                    if dq_heap_row.heap[0] == d_old:
                        # Update per-row heap and heap of row maxes
                        dq_heap_row.remove(d_old)
                        H.remove(d_old)
                        # Update row max
                        if len(dq_heap_row) > 0:
                            H.push(dq_heap_row.heap[0])
                    else:
                        # Only update per-row heap
                        dq_heap_row.remove(d_old)

        del dq_dict[u]
        # Mark row u as deleted, but keep placeholder
        dq_heap[u] = MappedQueue()
        # Merge u into v and update a
        a[v] += a[u]
        a[u] = 0
        if directed:
            b[v] += b[u]
            b[u] = 0

    return sorted(communities.values(), key=len, reverse=True)
Example #9
from networkx.algorithms.community.quality import modularity
from networkx.utils.mapped_queue import MappedQueue


def greedy_modularity_communities(G, weight=None):
    """Find communities in graph using Clauset-Newman-Moore greedy modularity
    maximization. This method currently supports the Graph class and does not
    consider edge weights.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The name of an edge attribute that holds the numerical value used
        as a weight. If None, then each edge has weight 1.

    Returns
    -------
    list
        A list of frozensets of nodes, one for each community,
        sorted by length with largest communities first.

    Examples
    --------
    >>> from networkx.algorithms.community import greedy_modularity_communities
    >>> G = nx.karate_club_graph()
    >>> c = list(greedy_modularity_communities(G))
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    """

    # Count nodes and edges
    N = len(G.nodes())
    m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)])
    q0 = 1.0 / (2.0*m)

    # Map node labels to contiguous integers
    label_for_node = dict((i, v) for i, v in enumerate(G.nodes()))
    node_for_label = dict((label_for_node[i], i) for i in range(N))

    # Calculate degrees
    k_for_label = G.degree(G.nodes(), weight=weight)
    k = [k_for_label[label_for_node[i]] for i in range(N)]

    # Initialize community and merge lists
    communities = dict((i, frozenset([i])) for i in range(N))
    merges = []

    # Initial modularity
    partition = [[label_for_node[x] for x in c] for c in communities.values()]
    q_cnm = modularity(G, partition)

    # Initialize data structures
    # CNM Eq 8-9 (Eq 8 was missing a factor of 2 from A_ij + A_ji)
    # a[i]: fraction of edges within community i
    # dq_dict[i][j]: dQ for merging community i, j
    # dq_heap[i][n]: (-dq, i, j) for community i's nth largest dQ
    # H[n]: (-dq, i, j) for community with nth largest max_j(dQ_ij)
    a = [k[i]*q0 for i in range(N)]
    dq_dict = dict(
        (i, dict(
            (j, 2*q0 - 2*k[i]*k[j]*q0*q0)
            for j in [
                node_for_label[u]
                for u in G.neighbors(label_for_node[i])]
            if j != i))
        for i in range(N))
    dq_heap = [
        MappedQueue([
            (-dq, i, j)
            for j, dq in dq_dict[i].items()])
        for i in range(N)]
    H = MappedQueue([
        dq_heap[i].h[0]
        for i in range(N)
        if len(dq_heap[i]) > 0])

    # Merge communities until we can't improve modularity
    while len(H) > 1:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            dq, i, j = H.pop()
        except IndexError:
            break
        dq = -dq
        # Remove best merge from row i heap
        dq_heap[i].pop()
        # Push new row max onto H
        if len(dq_heap[i]) > 0:
            H.push(dq_heap[i].h[0])
        # If this element was also at the root of row j, we need to remove the
        # duplicate entry from H
        if dq_heap[j].h[0] == (-dq, j, i):
            H.remove((-dq, j, i))
            # Remove best merge from row j heap
            dq_heap[j].remove((-dq, j, i))
            # Push new row max onto H
            if len(dq_heap[j]) > 0:
                H.push(dq_heap[j].h[0])
        else:
            # Duplicate wasn't in H, just remove from row j heap
            dq_heap[j].remove((-dq, j, i))
        # Stop when change is non-positive
        if dq <= 0:
            break

        # Perform merge
        communities[j] = frozenset(communities[i] | communities[j])
        del communities[i]
        merges.append((i, j, dq))
        # New modularity
        q_cnm += dq
        # Get list of communities connected to merged communities
        i_set = set(dq_dict[i].keys())
        j_set = set(dq_dict[j].keys())
        all_set = (i_set | j_set) - set([i, j])
        both_set = i_set & j_set
        # Merge i into j and update dQ
        for k in all_set:
            # Calculate new dq value
            if k in both_set:
                dq_jk = dq_dict[j][k] + dq_dict[i][k]
            elif k in j_set:
                dq_jk = dq_dict[j][k] - 2.0*a[i]*a[k]
            else:
                # k in i_set
                dq_jk = dq_dict[i][k] - 2.0*a[j]*a[k]
            # Update rows j and k
            for row, col in [(j, k), (k, j)]:
                # Save old value for finding heap index
                if k in j_set:
                    d_old = (-dq_dict[row][col], row, col)
                else:
                    d_old = None
                # Update dict for j,k only (i is removed below)
                dq_dict[row][col] = dq_jk
                # Save old max of per-row heap
                if len(dq_heap[row]) > 0:
                    d_oldmax = dq_heap[row].h[0]
                else:
                    d_oldmax = None
                # Add/update heaps
                d = (-dq_jk, row, col)
                if d_old is None:
                    # We're creating a new nonzero element, add to heap
                    dq_heap[row].push(d)
                else:
                    # Update existing element in per-row heap
                    dq_heap[row].update(d_old, d)
                # Update heap of row maxes if necessary
                if d_oldmax is None:
                    # No entries previously in this row, push new max
                    H.push(d)
                else:
                    # We've updated an entry in this row, has the max changed?
                    if dq_heap[row].h[0] != d_oldmax:
                        H.update(d_oldmax, dq_heap[row].h[0])

        # Remove row/col i from matrix
        i_neighbors = dq_dict[i].keys()
        for k in i_neighbors:
            # Remove from dict
            dq_old = dq_dict[k][i]
            del dq_dict[k][i]
            # Remove from heaps if we haven't already
            if k != j:
                # Remove both row and column
                for row, col in [(k, i), (i, k)]:
                    # Check if replaced dq is row max
                    d_old = (-dq_old, row, col)
                    if dq_heap[row].h[0] == d_old:
                        # Update per-row heap and heap of row maxes
                        dq_heap[row].remove(d_old)
                        H.remove(d_old)
                        # Update row max
                        if len(dq_heap[row]) > 0:
                            H.push(dq_heap[row].h[0])
                    else:
                        # Only update per-row heap
                        dq_heap[row].remove(d_old)

        del dq_dict[i]
        # Mark row i as deleted, but keep placeholder
        dq_heap[i] = MappedQueue()
        # Merge i into j and update a
        a[j] += a[i]
        a[i] = 0

    communities = [
        frozenset([label_for_node[i] for i in c])
        for c in communities.values()]
    return sorted(communities, key=len, reverse=True)
Example #10
import heapq

import networkx as nx
import torch

from networkx.algorithms.community.quality import modularity
from networkx.utils.mapped_queue import MappedQueue


def greedy_modularity_communities(G, K, weight=None):
    """Find communities in graph using Clauset-Newman-Moore greedy modularity
    maximization. This method currently supports the Graph class and does not
    consider edge weights.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists.
    
    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    Yields sets of nodes, one for each community.

    Examples
    --------
    >>> from networkx.algorithms.community import greedy_modularity_communities
    >>> G = nx.karate_club_graph()
    >>> c = list(greedy_modularity_communities(G))
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    """
    # Convert the adjacency tensor to a directed NetworkX graph
    G = nx.from_numpy_array(G.detach().numpy(), create_using=nx.DiGraph())
    # Count nodes and edges
    N = len(G.nodes())
    m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)])
    q0 = 1.0 / (2.0 * m)

    # Map node labels to contiguous integers
    label_for_node = dict((i, v) for i, v in enumerate(G.nodes()))
    node_for_label = dict((label_for_node[i], i) for i in range(N))

    # Calculate degrees
    k_for_label = G.degree(G.nodes(), weight=weight)
    k = [k_for_label[label_for_node[i]] for i in range(N)]

    # Initialize community and merge lists
    communities = dict((i, frozenset([i])) for i in range(N))
    merges = []

    # Initial modularity
    partition = [[label_for_node[x] for x in c] for c in communities.values()]
    q_cnm = modularity(G, partition)

    # Initialize data structures
    # CNM Eq 8-9 (Eq 8 was missing a factor of 2 from A_ij + A_ji)
    # a[i]: fraction of edges within community i
    # dq_dict[i][j]: dQ for merging community i, j
    # dq_heap[i][n]: (-dq, i, j) for community i's nth largest dQ
    # H[n]: (-dq, i, j) for community with nth largest max_j(dQ_ij)
    a = [k[i] * q0 for i in range(N)]
    dq_dict = dict(
        (i,
         dict((j, q0 * (G[i][j]['weight'] + G[j][i]['weight']) -
               2 * k[i] * k[j] * q0 * q0) for j in
              [node_for_label[u] for u in G.neighbors(label_for_node[i])]
              if j != i)) for i in range(N))
    dq_heap = [
        MappedQueue([(-dq, i, j) for j, dq in dq_dict[i].items()])
        for i in range(N)
    ]
    H = MappedQueue([dq_heap[i].h[0] for i in range(N) if len(dq_heap[i]) > 0])

    # Merge communities until we can't improve modularity
    while len(H) > 1:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            dq, i, j = H.pop()
        except IndexError:
            break
        dq = -dq
        # Remove best merge from row i heap
        dq_heap[i].pop()
        # Push new row max onto H
        if len(dq_heap[i]) > 0:
            H.push(dq_heap[i].h[0])
        # If this element was also at the root of row j, we need to remove the
        # duplicate entry from H
        if dq_heap[j].h[0] == (-dq, j, i):
            H.remove((-dq, j, i))
            # Remove best merge from row j heap
            dq_heap[j].remove((-dq, j, i))
            # Push new row max onto H
            if len(dq_heap[j]) > 0:
                H.push(dq_heap[j].h[0])
        else:
            # Duplicate wasn't in H, just remove from row j heap
            dq_heap[j].remove((-dq, j, i))

        # Perform merge
        communities[j] = frozenset(communities[i] | communities[j])
        del communities[i]
        merges.append((i, j, dq))

        # Stop when the desired number of communities is reached
        if len(communities) == K:
            break

        # New modularity
        q_cnm += dq
        # Get list of communities connected to merged communities
        i_set = set(dq_dict[i].keys())
        j_set = set(dq_dict[j].keys())
        all_set = (i_set | j_set) - set([i, j])
        both_set = i_set & j_set
        # Merge i into j and update dQ
        for k in all_set:
            # Calculate new dq value
            if k in both_set:
                dq_jk = dq_dict[j][k] + dq_dict[i][k]
            elif k in j_set:
                dq_jk = dq_dict[j][k] - 2.0 * a[i] * a[k]
            else:
                # k in i_set
                dq_jk = dq_dict[i][k] - 2.0 * a[j] * a[k]
            # Update rows j and k
            for row, col in [(j, k), (k, j)]:
                # Save old value for finding heap index
                if k in j_set:
                    d_old = (-dq_dict[row][col], row, col)
                else:
                    d_old = None
                # Update dict for j,k only (i is removed below)
                dq_dict[row][col] = dq_jk
                # Save old max of per-row heap
                if len(dq_heap[row]) > 0:
                    d_oldmax = dq_heap[row].h[0]
                else:
                    d_oldmax = None
                # Add/update heaps
                d = (-dq_jk, row, col)
                if d_old is None:
                    # We're creating a new nonzero element, add to heap
                    dq_heap[row].push(d)
                else:
                    # Update existing element in per-row heap
                    dq_heap[row].update(d_old, d)
                # Update heap of row maxes if necessary
                if d_oldmax is None:
                    # No entries previously in this row, push new max
                    H.push(d)
                else:
                    # We've updated an entry in this row, has the max changed?
                    if dq_heap[row].h[0] != d_oldmax:
                        H.update(d_oldmax, dq_heap[row].h[0])

        # Remove row/col i from matrix
        i_neighbors = dq_dict[i].keys()
        for k in i_neighbors:
            # Remove from dict
            dq_old = dq_dict[k][i]
            del dq_dict[k][i]
            # Remove from heaps if we haven't already
            if k != j:
                # Remove both row and column
                for row, col in [(k, i), (i, k)]:
                    # Check if replaced dq is row max
                    d_old = (-dq_old, row, col)
                    if dq_heap[row].h[0] == d_old:
                        # Update per-row heap and heap of row maxes
                        dq_heap[row].remove(d_old)
                        H.remove(d_old)
                        # Update row max
                        if len(dq_heap[row]) > 0:
                            H.push(dq_heap[row].h[0])
                    else:
                        # Only update per-row heap
                        dq_heap[row].remove(d_old)

        del dq_dict[i]
        # Mark row i as deleted, but keep placeholder
        dq_heap[i] = MappedQueue()
        # Merge i into j and update a
        a[j] += a[i]
        a[i] = 0


    heap = []
    for j in communities:
        heapq.heappush(heap, (a[j], set(communities[j])))
    while len(heap) > K:
        weight1, com1 = heapq.heappop(heap)
        weight2, com2 = heapq.heappop(heap)
        com1.update(com2)
        heapq.heappush(heap, (weight1 + weight2, com1))
    communities = [x[1] for x in heap]
    r = torch.zeros(N, K)
    for i, c in enumerate(communities):
        for v in c:
            r[v, i] = 1
    return r
Example #11
 def _make_mapped_queue(self, h):
     q = MappedQueue()
     q.h = h
     q.d = dict((elt, pos) for pos, elt in enumerate(h))
     return q
Example #12
from networkx.utils.mapped_queue import MappedQueue

# total_conformity and peer_conformity are project-specific helpers
# that this snippet assumes are defined elsewhere.


def greedy_modularity_communities(G, weight=None):

    # Count nodes and edges
    N = len(G.nodes())
    m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)])

    # Map node labels to contiguous integers
    num_to_id_map = dict((i, v) for i, v in enumerate(G.nodes()))
    id_to_num_map = dict((num_to_id_map[i], i) for i in range(N))

    # Initialize community and merge lists
    communities = dict((i, frozenset([i])) for i in range(N))
    merges = []

    # Initial conformity
    partition = [[num_to_id_map[x] for x in c] for c in communities.values()]
    q_cnm = total_conformity(G, partition)

    dq_dict = dict((
        i,
        dict(
            (j, peer_conformity(G, num_to_id_map[i], num_to_id_map[j]))
            for j in [id_to_num_map[u] for u in G.neighbors(num_to_id_map[i])]
            if j != i)) for i in range(N))

    dq_heap = [
        MappedQueue([(-dq, i, j) for j, dq in dq_dict[i].items()])
        for i in range(N)
    ]

    H = MappedQueue([dq_heap[i].h[0] for i in range(N) if len(dq_heap[i]) > 0])

    # Merge communities until the merge heap is exhausted
    # (the non-positive-dq stop below is commented out)
    while len(H) > 1:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            dq, i, j = H.pop()
        except IndexError:
            break
        #print("1. Popped " + str(i) + " " + str(j))
        dq = -dq
        # Remove best merge from row i heap
        dq_heap[i].pop()
        # Push new row max onto H
        # if len(dq_heap[i]) > 0:
        #     print("2. Pushed into H " + str(dq_heap[i].h[0]))
        #     H.push(dq_heap[i].h[0])

        # if there is an edge from j to i, remove the corresponding entries
        if i in dq_dict[j].keys():
            d_old = (-dq_dict[j][i], j, i)
            old_max = dq_heap[j].h[0]
            #print("3. Removing from H " + str(old_max))
            H.remove(old_max)
            #print("3.1. Removing from dq_heap["+str(j)+"] "+ str(d_old))
            dq_heap[j].remove(d_old)
            if len(dq_heap[j]) > 0:
                H.push(dq_heap[j].h[0])
            del dq_dict[j][i]
        # Stop when change is non-positive
        # if dq <= 0:
        #     break

        # Perform merge
        #print("4. Merging " + str(i) + " " + str(j)+ " into " + str(j))
        communities[j] = frozenset(communities[i] | communities[j])
        del communities[i]
        merges.append((i, j, dq))
        # New modularity
        q_cnm += dq

        # updating dq for all k which i and j connects to
        i_set = set(dq_dict[i].keys())
        j_set = set(dq_dict[j].keys())
        all_set = (i_set | j_set) - {i, j}
        both_set = i_set & j_set

        # remove old maximum of heap j
        old_j_heap_max = None
        if len(dq_heap[j]) > 0:
            old_j_heap_max = dq_heap[j].h[0]

        for k in all_set:
            if k in both_set:
                # get the old j->k entry
                old_d_jk = (-dq_dict[j][k], j, k)
                old_heap_max = dq_heap[j].h[0]

                # remove the entry from heap[j]
                #print("5. Removing from dq_heap["+str(j)+"] "+ str(old_d_jk))
                dq_heap[j].remove(old_d_jk)
                # update the entry in dq_dict
                dq_dict[j][k] = dq_dict[j][k] + dq_dict[i][k]
                # push the entry to heap[j]
                new_d_jk = (-dq_dict[j][k], j, k)
                #print("6. Pushing to dq_heap["+str(j)+"] "+ str(new_d_jk))
                dq_heap[j].push(new_d_jk)
                # if the new entry is the new max, push it into H
                # remove the entry from H if it was present in H

                # print("7. Removing from H "+ str(old_heap_max))
                # H.remove(old_heap_max)
                # print("8. Pushing to H "+ str(dq_heap[j].h[0]))
                # H.push(dq_heap[j].h[0])

                # get the entry for i->k
                d_old = (-dq_dict[i][k], i, k)
                # remove the entry from H if it was present in H
                # if dq_heap[i].h[0] == d_old:
                #   print("9. Removing from H "+ str(d_old))
                #   H.remove(d_old)
                #print("10. Removing from dq_heap["+str(i)+"] "+ str(d_old))
                dq_heap[i].remove(d_old)
                del dq_dict[i][k]
            elif k in j_set:
                continue
            else:
                if k == j:
                    continue
                # k in i_set
                d_old = (-dq_dict[i][k], i, k)
                # remove the entry from H if it was present in H
                # if dq_heap[i].h[0] == d_old:
                #   print("11. Removing from H "+ str(d_old))
                #   H.remove(d_old)
                # remove the entry from heap[j]
                #print("12. Removing from dq_heap["+str(i)+"] "+ str(d_old))
                dq_heap[i].remove(d_old)
                dq_dict[j][k] = dq_dict[i][k]
                del dq_dict[i][k]
                # push the entry to heap[j]
                # if len(dq_heap[j])>0:
                #   old_heap_max = dq_heap[j].h[0]
                #   print("13. Removing from H "+ str(old_heap_max))
                #   H.remove(old_heap_max)

                #print("14. Pushing to dq_heap["+str(j)+"] "+ str((-dq_dict[j][k], j, k)))
                dq_heap[j].push((-dq_dict[j][k], j, k))
                # if the new entry is the new max, push it into H
                # print("15. Pushing to H "+ str(dq_heap[j].h[0]))
                # H.push(dq_heap[j].h[0])

        if len(dq_heap[j]) > 0 and (old_j_heap_max is None
                                    or old_j_heap_max != dq_heap[j].h[0]):
            if old_j_heap_max is not None:
                H.remove(old_j_heap_max)
            H.push(dq_heap[j].h[0])

        # updating dq for all nodes that connect to j and i
        i_set = set(k for k in dq_dict.keys() if i in dq_dict[k].keys())
        j_set = set(k for k in dq_dict.keys() if j in dq_dict[k].keys())
        all_set = (i_set | j_set) - {i, j}
        both_set = i_set & j_set
        for k in all_set:
            if k in both_set:
                if i == k:
                    continue
                old_d_kj = (-dq_dict[k][j], k, j)
                old_heap_max = dq_heap[k].h[0]
                dq_heap[k].remove(old_d_kj)

                old_d_ki = (-dq_dict[k][i], k, i)
                dq_heap[k].remove(old_d_ki)

                H.remove(old_heap_max)

                d_new = (-(dq_dict[k][j] + dq_dict[k][i]), k, j)
                dq_dict[k][j] = dq_dict[k][j] + dq_dict[k][i]
                del dq_dict[k][i]

                dq_heap[k].push(d_new)
                H.push(dq_heap[k].h[0])
            elif k in j_set:
                continue
            else:
                if k == j:
                    continue
                # k in i_set
                d_old = (-dq_dict[k][i], k, i)
                old_heap_max = dq_heap[k].h[0]
                dq_heap[k].remove(d_old)
                H.remove(old_heap_max)

                d_new = (-dq_dict[k][i], k, j)
                dq_dict[k][j] = dq_dict[k][i]
                del dq_dict[k][i]
                dq_heap[k].push(d_new)
                H.push(dq_heap[k].h[0])

        del dq_dict[i]
        dq_heap[i] = MappedQueue()

    communities = [
        frozenset([num_to_id_map[i] for i in c]) for c in communities.values()
    ]
    return sorted(communities, key=len, reverse=True)
Example #13
 def test_init(self):
     h = [5, 4, 3, 2, 1, 0]
     q = MappedQueue(h)
     self._check_map(q)
Example #14
 def _make_mapped_queue(self, h):
     priority_dict = {elt: elt for elt in h}
     return MappedQueue(priority_dict)
Example #15
from networkx.algorithms.community.quality import modularity
from networkx.utils.mapped_queue import MappedQueue


def greedy_modularity_communities(G, weight=None, resolution=1):
    r"""Find communities in G using greedy modularity maximization.

    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_.
    This method currently supports the Graph class.

    Greedy modularity maximization begins with each node in its own community
    and joins the pair of communities that most increases modularity until no
    such pair exists.

    This function maximizes the generalized modularity, where `resolution`
    is the resolution parameter, often expressed as $\gamma$.
    See :func:`~networkx.algorithms.community.quality.modularity`.

    Parameters
    ----------
    G : NetworkX graph
    weight : string or None, optional (default=None)
       The name of an edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    resolution : float (default=1)
       If resolution is less than 1, modularity favors larger communities.
       Greater than 1 favors smaller communities.

    Returns
    -------
    list
        A list of sets of nodes, one for each community.
        Sorted by length with largest communities first.

    Examples
    --------
    >>> from networkx.algorithms.community import greedy_modularity_communities
    >>> G = nx.karate_club_graph()
    >>> c = list(greedy_modularity_communities(G))
    >>> sorted(c[0])
    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]

    See Also
    --------
    modularity

    References
    ----------
    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
       Oxford University Press 2011.
    .. [2] Clauset, A., Newman, M. E., & Moore, C.
       "Finding community structure in very large networks."
       Physical Review E 70(6), 2004.
    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
       Detection" Phys. Rev. E74, 2006.
    """

    # Count nodes and edges
    N = len(G.nodes())
    m = sum([d.get("weight", 1) for u, v, d in G.edges(data=True)])
    q0 = 1.0 / (2.0 * m)

    # Map node labels to contiguous integers
    label_for_node = {i: v for i, v in enumerate(G.nodes())}
    node_for_label = {label_for_node[i]: i for i in range(N)}

    # Calculate degrees
    k_for_label = G.degree(G.nodes(), weight=weight)
    k = [k_for_label[label_for_node[i]] for i in range(N)]

    # Initialize community and merge lists
    communities = {i: frozenset([i]) for i in range(N)}
    merges = []

    # Initial modularity
    partition = [[label_for_node[x] for x in c] for c in communities.values()]
    q_cnm = modularity(G, partition, resolution=resolution)

    # Initialize data structures
    # CNM Eq 8-9 (Eq 8 was missing a factor of 2 from A_ij + A_ji)
    # a[i]: fraction of edges within community i
    # dq_dict[i][j]: dQ for merging community i, j
    # dq_heap[i][n]: (-dq, i, j) for community i's nth largest dQ
    # H[n]: (-dq, i, j) for community with nth largest max_j(dQ_ij)
    a = [k[i] * q0 for i in range(N)]
    dq_dict = {
        i: {
            j: 2 * q0 * G.get_edge_data(label_for_node[i],
                                        label_for_node[j]).get(weight, 1.0) -
            2 * resolution * k[i] * k[j] * q0 * q0
            for j in
            [node_for_label[u] for u in G.neighbors(label_for_node[i])]
            if j != i
        }
        for i in range(N)
    }
    dq_heap = [
        MappedQueue([(-dq, i, j) for j, dq in dq_dict[i].items()])
        for i in range(N)
    ]
    H = MappedQueue([dq_heap[i].h[0] for i in range(N) if len(dq_heap[i]) > 0])

    # Merge communities until we can't improve modularity
    while len(H) > 1:
        # Find best merge
        # Remove from heap of row maxes
        # Ties will be broken by choosing the pair with lowest min community id
        try:
            dq, i, j = H.pop()
        except IndexError:
            break
        dq = -dq
        # Remove best merge from row i heap
        dq_heap[i].pop()
        # Push new row max onto H
        if len(dq_heap[i]) > 0:
            H.push(dq_heap[i].h[0])
        # If this element was also at the root of row j, we need to remove the
        # duplicate entry from H
        if dq_heap[j].h[0] == (-dq, j, i):
            H.remove((-dq, j, i))
            # Remove best merge from row j heap
            dq_heap[j].remove((-dq, j, i))
            # Push new row max onto H
            if len(dq_heap[j]) > 0:
                H.push(dq_heap[j].h[0])
        else:
            # Duplicate wasn't in H, just remove from row j heap
            dq_heap[j].remove((-dq, j, i))
        # Stop when change is non-positive
        if dq <= 0:
            break

        # Perform merge
        communities[j] = frozenset(communities[i] | communities[j])
        del communities[i]
        merges.append((i, j, dq))
        # New modularity
        q_cnm += dq
        # Get list of communities connected to merged communities
        i_set = set(dq_dict[i].keys())
        j_set = set(dq_dict[j].keys())
        all_set = (i_set | j_set) - {i, j}
        both_set = i_set & j_set
        # Merge i into j and update dQ
        for k in all_set:
            # Calculate new dq value
            if k in both_set:
                dq_jk = dq_dict[j][k] + dq_dict[i][k]
            elif k in j_set:
                dq_jk = dq_dict[j][k] - 2.0 * resolution * a[i] * a[k]
            else:
                # k in i_set
                dq_jk = dq_dict[i][k] - 2.0 * resolution * a[j] * a[k]
            # Update rows j and k
            for row, col in [(j, k), (k, j)]:
                # Save old value for finding heap index
                if k in j_set:
                    d_old = (-dq_dict[row][col], row, col)
                else:
                    d_old = None
                # Update dict for j,k only (i is removed below)
                dq_dict[row][col] = dq_jk
                # Save old max of per-row heap
                if len(dq_heap[row]) > 0:
                    d_oldmax = dq_heap[row].h[0]
                else:
                    d_oldmax = None
                # Add/update heaps
                d = (-dq_jk, row, col)
                if d_old is None:
                    # We're creating a new nonzero element, add to heap
                    dq_heap[row].push(d)
                else:
                    # Update existing element in per-row heap
                    dq_heap[row].update(d_old, d)
                # Update heap of row maxes if necessary
                if d_oldmax is None:
                    # No entries previously in this row, push new max
                    H.push(d)
                else:
                    # We've updated an entry in this row, has the max changed?
                    if dq_heap[row].h[0] != d_oldmax:
                        H.update(d_oldmax, dq_heap[row].h[0])

        # Remove row/col i from matrix
        i_neighbors = dq_dict[i].keys()
        for k in i_neighbors:
            # Remove from dict
            dq_old = dq_dict[k][i]
            del dq_dict[k][i]
            # Remove from heaps if we haven't already
            if k != j:
                # Remove both row and column
                for row, col in [(k, i), (i, k)]:
                    # Check if replaced dq is row max
                    d_old = (-dq_old, row, col)
                    if dq_heap[row].h[0] == d_old:
                        # Update per-row heap and heap of row maxes
                        dq_heap[row].remove(d_old)
                        H.remove(d_old)
                        # Update row max
                        if len(dq_heap[row]) > 0:
                            H.push(dq_heap[row].h[0])
                    else:
                        # Only update per-row heap
                        dq_heap[row].remove(d_old)

        del dq_dict[i]
        # Mark row i as deleted, but keep placeholder
        dq_heap[i] = MappedQueue()
        # Merge i into j and update a
        a[j] += a[i]
        a[i] = 0

    communities = [
        frozenset([label_for_node[i] for i in c])
        for c in communities.values()
    ]
    return sorted(communities, key=len, reverse=True)
Example #16
import heapq

import networkx as nx
import torch

from networkx.algorithms.community.quality import modularity
from networkx.utils.mapped_queue import MappedQueue


def greedy_modularity_communities(G, K):

    #  Code modified from https://networkx.github.io/documentation/latest/_modules/networkx/algorithms/community/modularity_max.html#greedy_modularity_communities

    G = nx.from_numpy_array(G.detach().numpy(), nx.Graph())
    node_num = len(G.nodes())
    m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)])
    q0 = 1.0 / (2.0 * m)
    label_for_node = {}
    for i, v in enumerate(G.nodes()):
        label_for_node[i] = v
    node_for_label = {}
    communities = {}
    k = []
    a = []
    merges = []
    degree_for_label = G.degree(G.nodes())
    for i in range(node_num):
        node_for_label[label_for_node[i]] = i
        communities[i] = frozenset([i])
        k.append(degree_for_label[label_for_node[i]])
        a.append(q0 * k[i])
    partition = [[label_for_node[x] for x in c] for c in communities.values()]
    q_cnm = modularity(G, partition)

    dq_dict = dict(
        (i,
         dict((j, q0 * (G[i][j]['weight'] + G[j][i]['weight']) -
               2 * k[i] * k[j] * q0 * q0) for j in
              [node_for_label[u] for u in G.neighbors(label_for_node[i])]
              if j != i)) for i in range(node_num))
    dq_heap = [
        MappedQueue([(-dq, i, j) for j, dq in dq_dict[i].items()])
        for i in range(node_num)
    ]
    H = MappedQueue(
        [dq_heap[i].h[0] for i in range(node_num) if len(dq_heap[i]) > 0])
    # Merge communities until we can't improve modularity
    while len(H) > 1:
        dq, i, j = H.pop()
        dq = -dq
        dq_heap[i].pop()
        if len(dq_heap[i]) != 0:
            H.push(dq_heap[i].h[0])
        if dq_heap[j].h[0] == (-dq, j, i):
            H.remove((-dq, j, i))
            dq_heap[j].remove((-dq, j, i))
            if len(dq_heap[j]) > 0:
                H.push(dq_heap[j].h[0])
        else:
            dq_heap[j].remove((-dq, j, i))

        communities[j] = frozenset(communities[i] | communities[j])
        del communities[i]
        merges.append((i, j, dq))

        if len(communities) == K:
            break
        q_cnm += dq
        i_set = set(dq_dict[i].keys())
        j_set = set(dq_dict[j].keys())
        all_set = (i_set | j_set) - set([i, j])
        both_set = i_set & j_set
        for k in all_set:
            if k in both_set:
                dq_jk = dq_dict[j][k] + dq_dict[i][k]
            elif k in j_set:
                dq_jk = dq_dict[j][k] - 2.0 * a[i] * a[k]
            else:
                dq_jk = dq_dict[i][k] - 2.0 * a[j] * a[k]
            for row, col in [(j, k), (k, j)]:
                if k in j_set:
                    d_old = (-dq_dict[row][col], row, col)
                else:
                    d_old = None
                dq_dict[row][col] = dq_jk
                if len(dq_heap[row]) > 0:
                    d_oldmax = dq_heap[row].h[0]
                else:
                    d_oldmax = None
                d = (-dq_jk, row, col)
                if d_old is None:
                    dq_heap[row].push(d)
                else:
                    dq_heap[row].update(d_old, d)
                if d_oldmax is None:
                    H.push(d)
                else:
                    if dq_heap[row].h[0] != d_oldmax:
                        H.update(d_oldmax, dq_heap[row].h[0])
        i_neighbors = dq_dict[i].keys()
        for k in i_neighbors:
            dq_old = dq_dict[k][i]
            del dq_dict[k][i]
            if k != j:
                for row, col in [(k, i), (i, k)]:
                    d_old = (-dq_old, row, col)
                    if dq_heap[row].h[0] == d_old:
                        dq_heap[row].remove(d_old)
                        H.remove(d_old)
                        if len(dq_heap[row]) > 0:
                            H.push(dq_heap[row].h[0])
                    else:
                        dq_heap[row].remove(d_old)

        del dq_dict[i]
        dq_heap[i] = MappedQueue()
        a[j] += a[i]
        a[i] = 0
    heap = []
    for j in communities:
        heapq.heappush(heap, (a[j], set(communities[j])))
    while len(heap) > K:
        weight1, com1 = heapq.heappop(heap)
        weight2, com2 = heapq.heappop(heap)
        com1.update(com2)
        heapq.heappush(heap, (weight1 + weight2, com1))
    communities = [x[1] for x in heap]
    r = torch.zeros(node_num, K)
    for i, c in enumerate(communities):
        for v in c:
            r[v, i] = 1
    return r