Example #1
def get_base_modularity_matrix(network):
    '''
    Obtain the modularity matrix for the whole network.  Assumes any edge weights
    use the key 'weight' in the edge attribute.

    Parameters
    ----------
    network : nx.Graph or nx.DiGraph
        The network of interest

    Returns
    -------
    np.matrix
        The modularity matrix for `network`

    Raises
    ------
    TypeError
        When the input `network` does not fit either nx.Graph or nx.DiGraph
    '''

    if type(network) == nx.Graph:
        if nx.is_weighted(network):
            return sparse.csc_matrix(nx.modularity_matrix(network,weight='weight'))
        return sparse.csc_matrix(nx.modularity_matrix(network))
    elif type(network) == nx.DiGraph:
        if nx.is_weighted(network):
            return sparse.csc_matrix(nx.directed_modularity_matrix(network,weight='weight'))
        return sparse.csc_matrix(nx.directed_modularity_matrix(network))
    else:
        raise TypeError('Graph type not supported. Use either nx.Graph or nx.DiGraph')
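A minimal usage sketch for the function above, assuming networkx is imported as nx and scipy's sparse module is imported as the snippet requires:

import networkx as nx
from scipy import sparse

G = nx.karate_club_graph()          # small test graph bundled with networkx
B = get_base_modularity_matrix(G)   # sparse CSC modularity matrix
print(B.shape)                      # (34, 34): one row/column per node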
Example #2
def im_time(G):

    # initiate an infomap object
    im = infomap.Infomap()

    # add nodes
    im.add_nodes(G.nodes)

    # add edges and weights
    # transpose a numpy array to get arrays of first and second elements in edges
    sources = np.array(G.edges).T[0]
    targets = np.array(G.edges).T[1]
    weights = nx.get_edge_attributes(G, 'weight').values()
    if nx.is_weighted(G):
        edges = zip(sources, targets, weights)
    else:
        edges = zip(sources, targets)
    im.add_links(edges)

    # initiate a list to store execution time for each algo
    algo_time = []

    for i in tqdm(range(10)):

        # start
        start_time = time.time()

        # run the model
        im.run()

        algo_time.append(time.time() - start_time)

    return np.mean(algo_time)
Example #3
def modularity(network, partition):
    '''
    Computes the modularity; works for Directed and Undirected Graphs, both
    unweighted and weighted.
    '''
    # put the network and partition into integer node format
    network,partition = transform_net_and_part(network,partition)
    # get the modularity matrix
    Q = get_base_modularity_matrix(network)
    if type(network) == nx.Graph:
        norm_fac = 2.*(network.number_of_edges())
        if nx.is_weighted(network):
            # 2*0.5*sum_{ij} A_{ij}
            norm_fac = nx.to_scipy_sparse_matrix(network).sum()
    elif type(network) == nx.DiGraph:
        norm_fac = 1.*network.number_of_edges()
        if nx.is_weighted(network):
            # sum_{ij} A_{ij}
            norm_fac = nx.to_scipy_sparse_matrix(network).sum()
    else:
        print('Invalid graph type')
        raise TypeError
    # reverse the partition dictionary
    rev_part = reverse_partition(partition)
    # get the list of all within-community pairs
    pairs = []
    for p in rev_part:
        for i,j in product(rev_part[p],rev_part[p]):
            pairs.append((i,j))
    # now sum up all the appropriate values
    return sum([Q[x] for x in pairs])/norm_fac
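A hedged usage sketch for modularity; it assumes the project's helpers transform_net_and_part and reverse_partition are in scope together with get_base_modularity_matrix from Example #1, and that partition maps each node to a community label:

import networkx as nx

G = nx.barbell_graph(5, 0)                     # two 5-cliques joined by a single edge
partition = {n: 0 if n < 5 else 1 for n in G}  # one community per clique
print(modularity(G, partition))                # roughly 0.45 for this two-clique split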
Example #4
    def test_is_weighted(self):
        G = nx.Graph()
        assert_false(nx.is_weighted(G))

        G = nx.path_graph(4)
        assert_false(nx.is_weighted(G))
        assert_false(nx.is_weighted(G, (2, 3)))

        G.add_node(4)
        G.add_edge(3, 4, weight=4)
        assert_false(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, (3, 4)))

        G = nx.DiGraph()
        G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
                                   ('1', '0', -5), ('0', '2', 2),
                                   ('1', '2', 4), ('2', '3', 1)])
        assert_true(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, ('1', '0')))

        G = G.to_undirected()
        assert_true(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, ('1', '0')))

        assert_raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
Example #5
    def test_is_weighted(self):
        G = nx.Graph()
        assert not nx.is_weighted(G)

        G = nx.path_graph(4)
        assert not nx.is_weighted(G)
        assert not nx.is_weighted(G, (2, 3))

        G.add_node(4)
        G.add_edge(3, 4, weight=4)
        assert not nx.is_weighted(G)
        assert nx.is_weighted(G, (3, 4))

        G = nx.DiGraph()
        G.add_weighted_edges_from([
            ("0", "3", 3),
            ("0", "1", -5),
            ("1", "0", -5),
            ("0", "2", 2),
            ("1", "2", 4),
            ("2", "3", 1),
        ])
        assert nx.is_weighted(G)
        assert nx.is_weighted(G, ("1", "0"))

        G = G.to_undirected()
        assert nx.is_weighted(G)
        assert nx.is_weighted(G, ("1", "0"))

        pytest.raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
Example #6
    def test_is_weighted(self):
        G = nx.Graph()
        assert_false(nx.is_weighted(G))

        G = nx.path_graph(4)
        assert_false(nx.is_weighted(G))
        assert_false(nx.is_weighted(G, (2, 3)))

        G.add_node(4)
        G.add_edge(3, 4, weight=4)
        assert_false(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, (3, 4)))

        G = nx.DiGraph()
        G.add_weighted_edges_from(
            [("0", "3", 3), ("0", "1", -5), ("1", "0", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)]
        )
        assert_true(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, ("1", "0")))

        G = G.to_undirected()
        assert_true(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, ("1", "0")))

        assert_raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
Example #7
    def test_is_weighted(self):
        G = nx.Graph()
        assert_false(nx.is_weighted(G))

        G = nx.path_graph(4)
        assert_false(nx.is_weighted(G))
        assert_false(nx.is_weighted(G, (2, 3)))

        G.add_node(4)
        G.add_edge(3, 4, weight=4)
        assert_false(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, (3, 4)))

        G = nx.DiGraph()
        G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
                                   ('1', '0', -5), ('0', '2', 2),
                                   ('1', '2', 4), ('2', '3', 1)])
        assert_true(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, ('1', '0')))

        G = G.to_undirected()
        assert_true(nx.is_weighted(G))
        assert_true(nx.is_weighted(G, ('1', '0')))

        assert_raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
Example #8
def nx_to_adj_list(G: Union[nx.Graph, nx.DiGraph]) -> tuple:
    """Converts NetworkX graph to an adj_list.

  - adj_list format:
      [
        {
          (neighbor, weight),
        },
      ]
  - G.nodes() are encoded to a numerical representation [1, n], in order to be
    used as indexes of adj_list.

  Args:
    G (Graph or DiGraph)

  Returns:
    adj_list (list)
    encoder (dict) : maps G.nodes() to the numerical representation [1, n]
    decoder (dict) : maps the numerical representation [1, n] back to G.nodes()

  Raises:
    Exception       : if G is negatively weighted
  """

    if nx.is_negatively_weighted(G):
        raise Exception("Only non-negative weighted graphs are currently"
                        " supported.")

    n = G.number_of_nodes()
    encoder = dict(zip(G.nodes, range(1, n + 1)))
    decoder = dict(zip(range(1, n + 1), G.nodes))
    adj_list = [set() for _ in range(n + 1)]

    if nx.is_directed(G):
        if nx.is_weighted(G):
            for u, v, w in G.edges.data("weight"):
                adj_list[encoder[u]].add((encoder[v], w))
        else:
            for u, v in G.edges:
                adj_list[encoder[u]].add((encoder[v], 1))
    else:
        if nx.is_weighted(G):
            for u, v, w in G.edges.data("weight"):
                adj_list[encoder[u]].add((encoder[v], w))
                adj_list[encoder[v]].add((encoder[u], w))
        else:
            for u, v in G.edges:
                adj_list[encoder[u]].add((encoder[v], 1))
                adj_list[encoder[v]].add((encoder[u], 1))

    return adj_list, encoder, decoder
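A small usage sketch, assuming networkx is imported as nx (the Union/Tuple typing imports only matter for the signature):

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([("a", "b", 2.0), ("b", "c", 1.5)])

adj_list, encoder, decoder = nx_to_adj_list(G)
print(adj_list[encoder["a"]])   # set of (neighbor index, weight) pairs, e.g. {(2, 2.0)}
print(decoder[encoder["c"]])    # 'c'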
Example #9
def test_modularity_clustering_nx(graph_file, partitions):
    # Read in the graph and get a cugraph object
    csv_data = utils.read_csv_for_nx(graph_file, read_weights_in_sp=True)

    nxG = nx.from_pandas_edgelist(
            csv_data,
            source="0",
            target="1",
            edge_attr="weight",
            create_using=nx.DiGraph(),
        )
    assert nx.is_directed(nxG) is True
    assert nx.is_weighted(nxG) is True

    cuG, isNx = ensure_cugraph_obj_for_nx(nxG)
    assert cugraph.is_directed(cuG) is True
    assert cugraph.is_weighted(cuG) is True

    # Get the modularity score for partitioning versus random assignment
    cu_score = cugraph_call(cuG, partitions)
    rand_score = random_call(cuG, partitions)

    # Assert that the partitioning has better modularity than the random
    # assignment
    assert cu_score > rand_score
Example #10
 def visualize(self,
               edgelabel='control',
               current_node=None,
               draw='pygraphviz'):
     """
     Visualizes a LOMAP system model.
     """
     assert edgelabel is None or nx.is_weighted(self.g, weight=edgelabel)
     if draw == 'pygraphviz':
         nx.view_pygraphviz(self.g, edgelabel)
     elif draw == 'matplotlib':
         pos = nx.get_node_attributes(self.g, 'location')
         if len(pos) != self.g.number_of_nodes():
             pos = nx.spring_layout(self.g)
         if current_node is None:
             colors = 'r'
         else:
             if current_node == 'init':
                 current_node = next(self.init.iterkeys())
             colors = dict([(v, 'r') for v in self.g])
             colors[current_node] = 'b'
             colors = colors.values()
         nx.draw(self.g, pos=pos, node_color=colors)
         nx.draw_networkx_labels(self.g, pos=pos)
         edge_labels = nx.get_edge_attributes(self.g, edgelabel)
         nx.draw_networkx_edge_labels(self.g,
                                      pos=pos,
                                      edge_labels=edge_labels)
     else:
         raise ValueError('Expected parameter draw to be either:' +
                          '"pygraphviz" or "matplotlib"!')
Example #11
def build_triples(graph):
    """
    Builds triples of (src, dst, distance) for each node in the graph, to all other connected nodes.
    PRE: distances in the graph are symmetric
    :param graph: networkx graph
    :return: set of triples
    """
    if nx.is_weighted(graph):
        gk = nk.nxadapter.nx2nk(graph, weightAttr="weight")
        distance_type = float
    else:
        gk = nk.nxadapter.nx2nk(graph)
        distance_type = int
    shortest_paths = nk.distance.APSP(gk).run().getDistances()
    n_nodes = len(shortest_paths)
    UNREACHABLE_DISTANCE = 1e10  # nk sets a very large distance value (~1e308) for unreachable nodes

    triples, pairs = set(), set()
    for i in range(n_nodes):
        for j in range(i + 1, n_nodes):
            distance = shortest_paths[i][j]
            if 0 < distance < UNREACHABLE_DISTANCE:
                if (
                        j, i
                ) not in pairs:  # checks that the symmetric triplets is not there
                    pairs.add((i, j))
                    triples.add((i, j, distance_type(distance)))
    return triples
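A usage sketch under the assumption that networkit is installed and imported as nk, as the snippet already requires, alongside networkx as nx:

import networkx as nx
import networkit as nk

G = nx.path_graph(4)     # unweighted, so shortest-path distances are hop counts
print(build_triples(G))  # {(0, 1, 1), (0, 2, 2), (0, 3, 3), (1, 2, 1), (1, 3, 2), (2, 3, 1)}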
Example #12
def clauset_newman_moore_detection(G):

    # fit the model
    if nx.is_weighted(G):
        c = greedy_modularity_communities(G, weight='weight')
    else:
        c = greedy_modularity_communities(G)

    # format the result
    communities = {}
    for node in G.nodes():
        for index, commu in enumerate(c):
            if node in sorted(commu):
                communities[node] = index

    # get the number of isolated nodes
    freq_dict = collections.Counter(communities.values())
    num_isolated_nodes = list(freq_dict.values()).count(1)

    # report the result
    print("Clauset-Newman-Moore Community Detection")
    print("----------------------------------------")
    if num_isolated_nodes == 0:
        print("Number of communities detected: {}".format(len(
            freq_dict.keys())))
    else:
        print("Number of communities detected: {}".format(
            len(freq_dict.keys()) - num_isolated_nodes))
        print("Number of nodes not in any community: {}".format(
            num_isolated_nodes))

    # return result
    out = {'algo': 'Clauset-Newman-Moore', 'communities': communities}

    return out
Example #13
def elimina_arestas(nx_grafo, distancia_max):
    grafo_incompleto = nx.Graph([(u, v, d)
                                 for u, v, d in nx_grafo.edges(data=True)
                                 if d['weight'] < distancia_max])
    print(nx.info(grafo_incompleto))
    print('Is the graph weighted? ' + str(nx.is_weighted(grafo_incompleto)))
    return grafo_incompleto
Example #14
def pass_to_ranks(G, nedges = 0, weightcol='weight'):
    """
    Passes an adjacency matrix to ranks.
     Inputs
        G - A networkx graph 
    Outputs
        PTR(G) - The passed to ranks version of the adjacency matrix of G
    """
    
    if type(G) == nx.classes.graph.Graph:
        nedges = len(G.edges)
        edges = np.zeros(nedges)  # declare float array
        # loop over the edges and store in an array
        if not nx.is_weighted(G, weight=weightcol):
            raise IOError('Weight column not found.')

        else:
            j = 0
            for source, target, data in G.edges(data=True):
                edges[j] = data[weightcol]
                j += 1

            ranked_values = rankdata(edges)
            # loop through the edges and assign the new weight:
            j = 0
            for source, target, data in G.edges(data=True):
                # This is meant to scale edge weights between 0 and 2
                data[weightcol] = ranked_values[j] * 2 / (nedges + 1)
                j += 1
 
        return G
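A short usage sketch, assuming networkx as nx, numpy as np, and rankdata from scipy.stats are imported as the function requires; note that the graph is modified in place and returned:

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 0.2), (1, 2, 5.0), (0, 2, 1.3)])

ptr = pass_to_ranks(G)
print(nx.get_edge_attributes(ptr, 'weight'))
# each weight becomes rank * 2 / (nedges + 1): smallest -> 0.5, middle -> 1.0, largest -> 1.5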
Example #15
 def visualize(self, edgelabel='control', current_node=None,
               draw='pygraphviz'):
     """
     Visualizes a LOMAP system model.
     """
     assert edgelabel is None or nx.is_weighted(self.g, weight=edgelabel)
     if draw == 'pygraphviz':
         nx.view_pygraphviz(self.g, edgelabel)
     elif draw == 'matplotlib':
         pos = nx.get_node_attributes(self.g, 'location')
         if len(pos) != self.g.number_of_nodes():
             pos = nx.spring_layout(self.g)
         if current_node is None:
             colors = 'r'
         else:
             if current_node == 'init':
                 current_node = next(self.init.iterkeys())
             colors = dict([(v, 'r') for v in self.g])
             colors[current_node] = 'b'
             colors = colors.values()
         nx.draw(self.g, pos=pos, node_color=colors)
         nx.draw_networkx_labels(self.g, pos=pos)
         edge_labels = nx.get_edge_attributes(self.g, edgelabel)
         nx.draw_networkx_edge_labels(self.g, pos=pos,
                                      edge_labels=edge_labels)
     else:
         raise ValueError('Expected parameter draw to be either:'
                          + '"pygraphviz" or "matplotlib"!')
Example #16
def main():
    header_list=["a","b","w"]
    E=pd.read_csv('mammalia-voles-bhp-trapping-55.edges',sep=' ' ,header=None, names=header_list)
    G=nx.from_pandas_edgelist(E,"a","b",["w"])
    if (not nx.is_weighted(G)):
        G=addWeight1(G)
    ### Draw Graph ###
    pos = nx.spring_layout(G)
    nx.draw(G,pos,with_labels=True)
    plt.figure(figsize=(12,8))
    plt.show()
    ##################
    q_a=noNodesEdges(G)
    q_b=averageDegree(G)
    q_c=density(G)
    if (nx.is_connected(G)):
        q_d=diameter(G)
    else:
        print("Since the graph isn't connected; diamter of the graph:",inf)
     ## FROM LIBRARY DIAMTER CALCULATION FOR WEIGHTED GRAPH###
    shortest1 = nx.shortest_path_length(G, weight="w")
    shortest2 = dict(shortest1)
    ecc = nx.eccentricity(G, sp=shortest2)
    diam = nx.diameter(G, e=ecc)
    print("From the Library, Diameter:", diam)
    ##########################################################
    q_e=clusteringCoefficient(G)
    print("Average Clustering Coefficient:",round(statistics.mean(q_e),6))
Example #17
def ld_time(G):

    # initiate an igraph object
    g = ig.Graph()

    # add vertices
    g.add_vertices(G.nodes)

    # add edges
    g.add_edges(G.edges)

    # add weights
    if nx.is_weighted(G):
        g.es['weight'] = list(nx.get_edge_attributes(G, 'weight').values())

    # initiate a list to store execution time for each algo
    algo_time = []

    for i in tqdm(range(10)):

        # start
        start_time = time.time()

        # fit the model
        partition = leidenalg.find_partition(
            g, leidenalg.ModularityVertexPartition)

        algo_time.append(time.time() - start_time)

    return np.mean(algo_time)
Example #18
def louvain_detection(G):

    # fit the model
    if nx.is_weighted(G):
        communities = community_louvain.best_partition(G, weight='weight')
    else:
        communities = community_louvain.best_partition(G)

    # get the number of isolated nodes
    freq_dict = collections.Counter(communities.values())
    num_isolated_nodes = list(freq_dict.values()).count(1)

    # report the result
    print("Louvain Community Detection")
    print("---------------------------")
    if num_isolated_nodes == 0:
        print("Number of communities detected: {}".format(len(
            freq_dict.keys())))
    else:
        print("Number of communities detected: {}".format(
            len(freq_dict.keys()) - num_isolated_nodes))
        print("Number of nodes not in any community: {}".format(
            num_isolated_nodes))

    # return result
    out = {'algo': 'Louvain', 'communities': communities}

    return out
Example #19
def calculate_internal_external_densities(
    graph: nx.Graph,
    partitions: Dict[Any, Any],
    weight_attribute: str = 'weight'
) -> Tuple[Dict[Any, List[float]], Dict[Any, List[float]]]:
    """
    Calculates the internal and external densities given a graph and a node membership dictionary. Density is defined
    by 'How to Make the Team: Social Networks vs. Demography as Criteria for Designing Effective Teams' as being
    the mean strength of tie between members of the set. In other words, density is the normalized average of edge
    weights by node.

    For a given node, the density is the sum of all edge weights divided by the maximum edge weight for that node.

    For internal density, only the edges whose target node is in the same membership group are summed. Similarly,
    for external density, only the edges whose target node is not in the same membership group are summed.

    See also:
    Reagans, R., Zuckerman, E., & McEvily, B. (2004).
    How to Make the Team: Social Networks vs. Demography as Criteria for Designing Effective Teams.
    Administrative Science Quarterly, 49(1), 101–133. https://doi.org/10.2307/4131457

    :param graph: A weighted graph that the internal density will be calculated over
    :param Dict[any, int] partitions: A dictionary for the graph with each key being a node id and each value is
        the membership for that node id. Often this will be a partition dictionary calculated from
        topologic.louvain.best_partition
    :param str weight_attribute: The key to the weight column on the graph's edges

    :return: A tuple of two dictionaries. The first is the internal density and the second is the external density
    :rtype: Tuple[Dict[Any, List[float]], Dict[Any, List[float]]]
    """
    if not nx.is_weighted(graph, weight=weight_attribute):
        raise ValueError('The graph must be weighted.')

    # build a dictionary where the key is a membership_id and the value is a list of nodes that belong to that
    # membership
    membership_inverted: Dict[Any, List[Any]] = collections.defaultdict(list)
    for key in partitions.keys():
        membership_inverted[partitions[key]].append(key)

    internal_density: Dict[Any, List[float]] = collections.defaultdict(list)
    external_density: Dict[Any, List[float]] = collections.defaultdict(list)

    for partition_id in membership_inverted.keys():
        for node in membership_inverted[partition_id]:
            max_weight = max((weight for source, target, weight in graph.edges(
                node, data=weight_attribute)))

            for source, target, weight in graph.edges(node,
                                                      data=weight_attribute):
                target_partition = partitions[target]
                density_for_node = weight / max_weight

                if target_partition == partition_id:
                    internal_density[partition_id].append(density_for_node)
                else:
                    external_density[partition_id].append(density_for_node)

    return internal_density, external_density
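A compact usage sketch, assuming networkx as nx (the collections/typing imports belong to the surrounding module):

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([("a", "b", 3.0), ("b", "c", 1.0), ("c", "d", 2.0)])
partitions = {"a": 0, "b": 0, "c": 1, "d": 1}

internal, external = calculate_internal_external_densities(G, partitions)
print(internal[0])   # per-node densities of edges staying inside community 0
print(external[0])   # per-node densities of edges leaving community 0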
Example #20
def read_nxgraph(input_file):
    G = nx.read_gpickle(input_file)
    if not nx.is_weighted(G, weight='weight'):
        print('G is not weighted -> assign weight 1 to each edge')
        for edge in G.edges():
            G[edge[0]][edge[1]]['weight'] = 1
    else:
        print('G is weighted')
    return G
Example #21
 def __init__(self,
              graph: nx.DiGraph or nx.Graph,
              damping_factor: float = 0.85):
     super(TextRank, self).__init__()
     self.graph = graph
     self.damping_factor = damping_factor
     self.directed = graph.is_directed()
     self.scores: List[float] = []
     assert nx.is_weighted(graph)
Example #22
def pass_to_ranks(G, nedges=0):
    """
    Passes an adjacency matrix to ranks.

    Inputs
        G - A networkx graph or an n x n ndarray
    Outputs
        PTR(G) - The passed to ranks version of the adjacency matrix of G
    """

    if type(G) == networkx.classes.graph.Graph:
        nedges = len(G.edges)
        edges = np.repeat(0, nedges)
        #loop over the edges and store in an array
        if networkx.is_weighted(G):
            j = 0
            for u, v, d in G.edges(data=True):
                edges[j] = d['weight']
                j += 1

            ranked_values = rankdata(edges)
            #loop through the edges and assign the new weight:
            j = 0
            for u, v, d in G.edges(data=True):
                #edges[j] = (ranked_values[j]*2)/(nedges + 1)
                d['weight'] = ranked_values[j] * 2 / (nedges + 1)
                j += 1

        return networkx.to_numpy_array(G)

    elif type(G) == np.ndarray:
        n, _ = G.shape
        unraveled_sim = G.ravel().copy()
        sorted_indices = np.argsort(unraveled_sim)

        if nedges == 0:  # Defaulted to (n choose 2), matrix assumed to be symmetric
            E = int((n**2 - n) / 2)  # or E = int(len(single)/a1_sim.shape[0])
            for i in range(E):
                unraveled_sim[sorted_indices[(n - 2) + 2 * (i + 1)]] = i / E
                unraveled_sim[sorted_indices[(n - 2) + 2 * (i + 1) +
                                             1]] = i / E

        else:
            for i in range(nedges):
                unraveled_sim[sorted_indices[
                    -2 * i -
                    1]] = (nedges -
                           i) / nedges  # assumes symmetric (undirected) matrix
                unraveled_sim[sorted_indices[-2 * i -
                                             2]] = (nedges - i) / nedges

            for i in range(n**2 - int(2 * nedges)):
                unraveled_sim[sorted_indices[i]] = 0  # set rest of edges to 0

        ptred = unraveled_sim.reshape((n, n))  # back to similarity mat

        return ptred
Example #23
def infomap_detection(G):

    # initiate an infomap object
    im = infomap.Infomap("--two-level")

    # add nodes
    im.add_nodes(G.nodes)

    # add edges and weights
    # transpose a numpy array to get arrays of first and second elements in edges
    sources = np.array(G.edges).T[0]
    targets = np.array(G.edges).T[1]
    weights = nx.get_edge_attributes(G, 'weight').values()
    if nx.is_weighted(G):
        edges = zip(sources, targets, weights)
    else:
        edges = zip(sources, targets)
    im.add_links(edges)

    # run the model
    im.run()

    # get a dictionary with node id as key and respective community as value
    communities = im.get_modules()

    # get the number of isolated nodes
    freq_dict = collections.Counter(communities.values())
    num_isolated_nodes = list(freq_dict.values()).count(1)

    # report the result
    print("Infomap Community Detection")
    print("---------------------------")
    if num_isolated_nodes == 0:
        print("Number of communities detected: {}".format(im.num_top_modules))
    else:
        print("Number of communities detected: {}".format(im.num_top_modules -
                                                          num_isolated_nodes))
        print("Number of nodes not in any community: {}".format(
            num_isolated_nodes))

    # return result
    out = {'algo': 'Infomap', 'communities': communities}

    return out
Example #24
def cria_grafo_networkx(df):
    print('Starting graph creation')
    grafo_ubs = nx.MultiDiGraph()
    for index, row in df.iterrows():
        grafo_ubs.add_edge(row['origem'],
                           row['destino'],
                           weight=row['distancia'])
    print(nx.info(grafo_ubs))
    print('Is the graph weighted? ' + str(nx.is_weighted(grafo_ubs)))
    return grafo_ubs
Example #25
 def test_load_roadnet_ca(self):
     path = raw_roadnet_ca
     graph = load_raw(path)
     assert is_weighted(graph) is False
     assert is_directed(graph) is False
     assert graph.has_edge('418', '5')
     assert graph.has_edge('108', '13')
     assert graph.has_edge('3248', '19')
     assert graph.has_edge('108', '8') is False
     assert graph.has_edge('16', '14') is False
     dump_graphml(graph, processed_roadnet_ca)
Example #26
def my_weighted_maximum_cut(G, sampler=None, **sampler_args):
    """Returns an approximate weighted maximum cut.

    Defines an Ising problem with ground states corresponding to
    a weighted maximum cut and uses the sampler to sample from it.

    A weighted maximum cut is a subset S of the vertices of G that
    maximizes the sum of the edge weights between S and its
    complementary subset.

    Parameters
    ----------
    G : NetworkX graph
        The graph on which to find a weighted maximum cut. Each edge in G should
        have a numeric `weight` attribute.

    sampler
        A binary quadratic model sampler. A sampler is a process that
        samples from low energy states in models defined by an Ising
        equation or a Quadratic Unconstrained Binary Optimization
        Problem (QUBO). A sampler is expected to have a 'sample_qubo'
        and 'sample_ising' method. A sampler is expected to return an
        iterable of samples, in order of increasing energy. If no
        sampler is provided, one must be provided using the
        `set_default_sampler` function.

    sampler_args
        Additional keyword parameters are passed to the sampler.

    Returns
    -------
    response
        The sampler's response, ordered from lowest energy; the first
        sample encodes the approximate maximum cut.

    Notes
    -----
    Samplers by their nature may not return the optimal solution. This
    function does not attempt to confirm the quality of the returned
    sample.

    """
    # In order to form the Ising problem, we want to increase the
    # energy by 1 for each edge between two nodes of the same color.
    # The linear biases can all be 0.
    h = {v: 0. for v in G}
    if nx.is_weighted(G):
        J = {(u, v): G[u][v]['weight'] for u, v in G.edges}
    else:
        J = {(u, v): 1 for u, v in G.edges}

    # draw the lowest energy sample from the sampler
    response = sampler.sample_ising(h, J, **sampler_args)

    return response
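A hedged usage sketch; the choice of dimod.ExactSolver() as the sampler is an assumption for illustration only, since the snippet merely expects an object exposing a sample_ising method:

import dimod
import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 2.0), (0, 2, 3.0)])

response = my_weighted_maximum_cut(G, sampler=dimod.ExactSolver())
best = response.first.sample                         # lowest-energy spin assignment
print({v for v, spin in best.items() if spin == 1})  # one side of the cut, here {2} or {0, 1}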
Example #27
 def test_load_openflights(self):
     path = raw_openflights
     graph = load_raw(path)
     assert is_weighted(graph) is False
     assert is_directed(graph) is True
     assert graph.has_edge('2', '4')
     assert graph.has_edge('4', '2')
     assert graph.has_edge('482', '61')
     assert graph.has_edge('592', '308')
     assert graph.has_edge('551', '375')
     assert graph.has_edge('375', '551') is False
     dump_graphml(graph, processed_openflights)
Example #28
def assert_is_weighted(graph: nx.Graph, weight_column: str = 'weight'):
    """
    Asserts that a graph object is a weighted graph

    :param graph: A graph to check
    :param weight_column: Weight column
    :raises UnweightedGraphError: Graph is not weighted by the requested weight column
    """
    if not nx.is_weighted(graph, weight=weight_column):
        raise UnweightedGraphError(
            "Weight column [{0}] not found in every graph edge attribute".
            format(weight_column))
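A brief usage sketch; UnweightedGraphError is the project's own exception class and is assumed to be importable from the same package:

import networkx as nx

G = nx.Graph()
G.add_edge("a", "b", weight=1.0)
assert_is_weighted(G)       # passes silently

G.add_edge("b", "c")        # this edge carries no 'weight' attribute
# assert_is_weighted(G)     # would now raise UnweightedGraphError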
Example #29
def test_nx_convert_undirected(graph_file):
    # read data and create a Nx Graph
    nx_df = utils.read_csv_for_nx(graph_file)
    nxG = nx.from_pandas_edgelist(nx_df, "0", "1", create_using=nx.Graph)
    assert nx.is_directed(nxG) is False
    assert nx.is_weighted(nxG) is False

    cuG = cugraph.utilities.convert_from_nx(nxG)
    assert cuG.is_directed() is False
    assert cuG.is_weighted() is False

    _compare_graphs(nxG, cuG, has_wt=False)
Example #30
 def test_load_usair97(self):
     path = raw_usair97
     graph = load_raw(path)
     assert is_weighted(graph) is True
     assert is_directed(graph) is False
     assert graph['2']['1']['weight'] == 0.0436
     assert graph['13']['6']['weight'] == 0.0143
     assert graph['144']['8']['weight'] == 0.2746
     assert graph['119']['95']['weight'] == 0.0323
     assert graph.has_edge('321', '163') is False
     assert graph.has_edge('230', '168') is False
     dump_graphml(graph, processed_usair97)
Example #31
def gn_time(G):

    # define a function to compute weighted centrality betweenness
    def most_central_edge(G):
        centrality = betweenness(G, weight='weight')
        return max(centrality, key=centrality.get)

    # initiate a list to store execution time for each algo
    algo_time = []

    for i in tqdm(range(10)):

        # start
        start_time = time.time()

        # fit the model
        if nx.is_weighted(G):
            solutions = girvan_newman(G, most_valuable_edge=most_central_edge)
        else:
            solutions = girvan_newman(G)

        # assign the number of times partitioning
        k = len(G.edges)

        # register modularity scores
        modularity_scores = dict()

        # initiate a maximum modularity score
        max_score = 0

        # initiate count (stopping criterion)
        count = 0

        # iterate over solutions
        for community in itertools.islice(solutions, k):
            solution = list(sorted(c) for c in community)
            score = modularity(G, solution)
            # store modularity score
            modularity_scores[len(solution)] = score
            if score > max_score:
                # save the community structure with highest modularity score
                community_structure = list(solution)
                max_score = score
                count = 0
            else:
                count = count + 1
            if count == 5:
                break

        algo_time.append(time.time() - start_time)

    return np.mean(algo_time)
Example #32
def remap_node_ids(
        graph: nx.Graph,
        weight_attribute: str = "weight",
        weight_default: float = 1.0) -> Tuple[nx.Graph, Dict[Any, str]]:
    """
    Given a graph with arbitrarily typed node ids, return a new graph that contains the exact same edgelist
    except the node ids are remapped to a string representation.

    Parameters
    ----------
    graph : nx.Graph
        A graph that has node ids of arbitrary types.
    weight_attribute : str,
        Default is ``weight``. An optional attribute to specify which column in your graph contains the weight value.
    weight_default : float,
        Default edge weight to use if a weight is not found on an edge in the graph
    Returns
    -------
    Tuple[nx.Graph, Dict[Any, str]]
        A new graph that contains the same edges except the node ids are remapped to strings. The keys in
        the dictionary are the old node ids and the values are the newly remapped node ids.

    Raises
    ------
    TypeError
    """
    if not isinstance(graph, nx.Graph):
        raise TypeError("graph must be of type nx.Graph")

    if not nx.is_weighted(graph, weight=weight_attribute):
        warnings.warn(
            f'Graph has at least one unweighted edge using weight_attribute "{weight_attribute}". '
            f'Defaulting unweighted edges to "{weight_default}"')

    node_id_dict: Dict[Any, str] = dict()
    graph_remapped = type(graph)()

    for source, target, weight in graph.edges(data=weight_attribute,
                                              default=weight_default):
        if source not in node_id_dict:
            node_id_dict[source] = str(len(node_id_dict.keys()))

        if target not in node_id_dict:
            node_id_dict[target] = str(len(node_id_dict.keys()))

        graph_remapped.add_edge(node_id_dict[source], node_id_dict[target])

        graph_remapped[node_id_dict[source]][
            node_id_dict[target]][weight_attribute] = weight

    return graph_remapped, node_id_dict
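A usage sketch, assuming networkx as nx (plus the warnings and typing imports used above):

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(("a", 1), ("b", 2), 0.5), (("b", 2), ("c", 3), 1.5)])

remapped, mapping = remap_node_ids(G)
print(list(remapped.edges(data="weight")))  # [('0', '1', 0.5), ('1', '2', 1.5)]
print(mapping)                              # {('a', 1): '0', ('b', 2): '1', ('c', 3): '2'}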
Example #33
    def __init__(self, G, weight='weight'):
        if not nx.is_weighted(G, weight=weight):
            raise nx.NetworkXError('Graph is not weighted.')

        self.G = G
        self.dist = {v: 0 for v in G}
        self.pred = {v: [] for v in G}
        self.weight = _weight_function(G, weight)
        # Calculate distance of shortest paths
        self.dist_bellman = _bellman_ford(G,
                                          list(G),
                                          self.weight,
                                          pred=self.pred,
                                          dist=self.dist)
Example #34
def draw_grid(ts, edgelabel='control', prop_colors=None, current_node=None):
    assert edgelabel is None or nx.is_weighted(ts.g, weight=edgelabel)
    pos = nx.get_node_attributes(ts.g, 'location')
    if current_node == 'init':
        current_node = next(ts.init.iterkeys())
    colors = dict([(v, 'w') for v in ts.g])
    if current_node:
        colors[current_node] = 'b'
    for v, d in ts.g.nodes_iter(data=True):
        if d['prop']:
            colors[v] = prop_colors[tuple(d['prop'])]
    colors = colors.values()
    labels = nx.get_node_attributes(ts.g, 'label')
    nx.draw(ts.g, pos=pos, node_color=colors)
    nx.draw_networkx_labels(ts.g, pos=pos, labels=labels)
    edge_labels = nx.get_edge_attributes(ts.g, edgelabel)
    nx.draw_networkx_edge_labels(ts.g, pos=pos,
                                 edge_labels=edge_labels)
Example #35
def johnson(G, weight='weight', new_weight=None):
    """Compute shortest paths between all nodes in a weighted graph using
    Johnson's algorithm.

    Parameters
    ----------
    G : NetworkX graph

    weight: string, optional (default='weight')
        Edge data key corresponding to the edge weight.

    new_weight: string, optional (default=None)
        Edge data key corresponding to the new edge weight after graph transformation.

    Returns
    -------
    distance : dictionary
       Dictionary, keyed by source and target, of shortest paths.

    Raises
    ------
    NetworkXError
       If given graph is not weighted.

    Examples
    --------
    >>> import networkx as nx
    >>> graph = nx.DiGraph()
    >>> graph.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
    ... ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)])
    >>> paths = nx.johnson(graph, weight='weight')
    >>> paths['0']['2']
    ['0', '1', '2']

    Notes
    ------
    Johnson's algorithm is suitable even for graphs with negative weights. It
    works by using the Bellman–Ford algorithm to compute a transformation of
    the input graph that removes all negative weights, allowing Dijkstra's
    algorithm to be used on the transformed graph.

    It may be faster than the Floyd–Warshall algorithm on sparse graphs.
    Algorithm complexity: O(V^2 * logV + V * E)

    See Also
    --------
    floyd_warshall_predecessor_and_distance
    floyd_warshall_numpy
    all_pairs_shortest_path
    all_pairs_shortest_path_length
    all_pairs_dijkstra_path
    bellman_ford
    """
    if not nx.is_weighted(G, weight=weight):
        raise nx.NetworkXError('Graph is not weighted.')

    new_node = nx.utils.generate_unique_node()
    G.add_weighted_edges_from((new_node, node, 0) for node in G.nodes())

    # Calculate distance of shortest paths
    dist = nx.bellman_ford(G, source=new_node, weight=weight)[1]

    delete = False
    if new_weight is None:
        delete = True
        new_weight = uuid.uuid1()

    for u, v, w in G.edges(data=True):
        w[new_weight] = w[weight] + dist[u] - dist[v]

    G.remove_node(new_node)
    all_pairs_path = nx.all_pairs_dijkstra_path(G, weight=new_weight)

    if delete:
        for u, v, w in G.edges(data=True):
            if new_weight in w:
                w.pop(new_weight)

    return all_pairs_path
Example #36
def johnson(G, weight='weight'):
    r"""Uses Johnson's Algorithm to compute shortest paths.

    Johnson's Algorithm finds a shortest path between each pair of
    nodes in a weighted graph even if negative weights are present.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or function
       If this is a string, then edge weights will be accessed via the
       edge attribute with this key (that is, the weight of the edge
       joining `u` to `v` will be ``G.edge[u][v][weight]``). If no
       such edge attribute exists, the weight of the edge is assumed to
       be one.

       If this is a function, the weight of an edge is the value
       returned by the function. The function must accept exactly three
       positional arguments: the two endpoints of an edge and the
       dictionary of edge attributes for that edge. The function must
       return a number.

    Returns
    -------
    distance : dictionary
       Dictionary, keyed by source and target, of shortest paths.

    Raises
    ------
    NetworkXError
       If given graph is not weighted.

    Examples
    --------
    >>> import networkx as nx
    >>> graph = nx.DiGraph()
    >>> graph.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
    ... ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)])
    >>> paths = nx.johnson(graph, weight='weight')
    >>> paths['0']['2']
    ['0', '1', '2']

    Notes
    -----
    Johnson's algorithm is suitable even for graphs with negative weights. It
    works by using the Bellman–Ford algorithm to compute a transformation of
    the input graph that removes all negative weights, allowing Dijkstra's
    algorithm to be used on the transformed graph.

    The time complexity of this algorithm is `O(n^2 \log n + n m)`,
    where `n` is the number of nodes and `m` the number of edges in the
    graph. For dense graphs, this may be faster than the Floyd–Warshall
    algorithm.

    See Also
    --------
    floyd_warshall_predecessor_and_distance
    floyd_warshall_numpy
    all_pairs_shortest_path
    all_pairs_shortest_path_length
    all_pairs_dijkstra_path
    bellman_ford

    """
    if not nx.is_weighted(G, weight=weight):
        raise nx.NetworkXError('Graph is not weighted.')

    dist = {v: 0 for v in G}
    pred = {v: None for v in G}
    weight = _weight_function(G, weight)
    # Calculate distance of shortest paths
    dist_bellman = _bellman_ford_relaxation(G, pred, dist, list(G), weight)[1]
    # Update the weight function to take into account the Bellman--Ford
    # relaxation distances.
    scale = lambda u, v: dist_bellman[u] - dist_bellman[v]
    new_weight = lambda u, v, d: weight(u, v, d) + scale(u, v)

    def dist_path(v):
        paths = {v: [v]}
        _dijkstra(G, v, new_weight, paths=paths)
        return paths

    return {v: dist_path(v) for v in G}
Example #37
def johnson(G, weight='weight'):
    """Compute shortest paths between all nodes in a weighted graph using
    Johnson's algorithm.

    Parameters
    ----------
    G : NetworkX graph

    weight: string, optional (default='weight')
        Edge data key corresponding to the edge weight.

    Returns
    -------
    distance : dictionary
       Dictionary, keyed by source and target, of shortest paths.

    Raises
    ------
    NetworkXError
       If given graph is not weighted.

    Examples
    --------
    >>> import networkx as nx
    >>> graph = nx.DiGraph()
    >>> graph.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
    ... ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)])
    >>> paths = nx.johnson(graph, weight='weight')
    >>> paths['0']['2']
    ['0', '1', '2']

    Notes
    -----
    Johnson's algorithm is suitable even for graphs with negative weights. It
    works by using the Bellman–Ford algorithm to compute a transformation of
    the input graph that removes all negative weights, allowing Dijkstra's
    algorithm to be used on the transformed graph.

    It may be faster than the Floyd–Warshall algorithm on sparse graphs.
    Algorithm complexity: O(V^2 * logV + V * E)

    """
    if not nx.is_weighted(G, weight=weight):
        raise nx.NetworkXError('Graph is not weighted.')

    dist = {v: 0 for v in G}
    pred = {v: None for v in G}

    # Calculate distance of shortest paths
    dist_bellman = _bellman_ford_relaxation(G, pred, dist, G.nodes(),
                                            weight)[1]

    if G.is_multigraph():
        get_weight = lambda u, v, data: (
            min(eattr.get(weight, 1) for eattr in data.values()) +
            dist_bellman[u] - dist_bellman[v])
    else:
        get_weight = lambda u, v, data: (data.get(weight, 1) +
                                         dist_bellman[u] - dist_bellman[v])

    all_pairs = {v: _dijkstra(G, v, get_weight, paths={v: [v]})[1] for v in G}
    return all_pairs