Example #1
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
       The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    """

    # Check global properties
    if G1.order() != G2.order(): return False

    # Check local properties
    d1 = G1.degree()
    t1 = nx.triangles(G1)
    c1 = nx.number_of_cliques(G1)
    props1 = [[d, t1[v], c1[v]] for v, d in d1]
    props1.sort()

    d2 = G2.degree()
    t2 = nx.triangles(G2)
    c2 = nx.number_of_cliques(G2)
    props2 = [[d, t2[v], c2[v]] for v, d in d2]
    props2.sort()

    if props1 != props2:
        return False

    # OK...
    return True
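
A minimal usage sketch for the helper above (it assumes networkx is imported as nx, which the function body requires); cycle_graph, path_graph and relabel_nodes are standard NetworkX constructors:

import networkx as nx

G1 = nx.cycle_graph(6)                               # every node has degree 2
G2 = nx.path_graph(6)                                # two endpoints have degree 1
G3 = nx.relabel_nodes(G1, {i: i + 10 for i in G1})   # relabelled copy of G1

print(could_be_isomorphic(G1, G2))   # False: degree sequences differ
print(could_be_isomorphic(G1, G3))   # True: invariants match, but isomorphism is still not proven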
Example #2
def could_be_isomorphic(G1,G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
       The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    """

    # Check global properties
    if G1.order() != G2.order(): return False

    # Check local properties
    d1 = G1.degree()
    t1=nx.triangles(G1)
    c1=nx.number_of_cliques(G1)
    props1 = [[d, t1[v], c1[v]] for v, d in d1]
    props1.sort()

    d2=G2.degree()
    t2=nx.triangles(G2)
    c2=nx.number_of_cliques(G2)
    props2 = [[d, t2[v], c2[v]] for v, d in d2]
    props2.sort()

    if props1 != props2:
        return False

    # OK...
    return True
Example #3
def graph_could_be_isomorphic(G1,G2):
    """
    Returns False if graphs G1 and G2 are definitely not isomorphic.

    True does NOT guarantee isomorphism.
  
    Checks for matching degree, triangle, and number of cliques sequences.
    """
  
    # Check global properties
    if G1.order() != G2.order(): return False
    
    # Check local properties
    d1=G1.degree(with_labels=True)
    t1=networkx.triangles(G1,with_labels=True)
    c1=networkx.number_of_cliques(G1,with_labels=True)
    props1=[ [d1[v], t1[v], c1[v]] for v in d1 ]
    props1.sort()
    
    d2=G2.degree(with_labels=True)
    t2=networkx.triangles(G2,with_labels=True)
    c2=networkx.number_of_cliques(G2,with_labels=True)
    props2=[ [d2[v], t2[v], c2[v]] for v in d2 ]
    props2.sort()

    if props1 != props2: 
#        print props1
#        print props2
        return False

    # OK...
    return True
Example #4
def graphical_features(train_data, test_data):
    ''' Compute some structural features on the graph obtained from the questions '''
    df = pd.concat([train_data[['qid1', 'qid2']], test_data[['qid1', 'qid2']]], axis = 0, ignore_index=True)
    g = nx.Graph()
    g.add_nodes_from(df.qid1)
    edges = list(df[["qid1", "qid2"]].to_records(index=False))
    g.add_edges_from(edges)
    g.remove_edges_from(nx.selfloop_edges(g))  # Graph.selfloop_edges() was removed in newer NetworkX; nx.selfloop_edges(g) is the replacement
    print('Get kcore dict')
    kcore_dict = nx.core_number(g)
    print('Get centrality dict')
    centrality_dict = nx.degree_centrality(g)
    print('Get closeness dict')
    closeness_dict = nx.closeness_centrality(g)
    print('Get cliques dict')
    cliques_dict = nx.number_of_cliques(g)
    
    return np.array([(min(kcore_dict[qid1], kcore_dict[qid2]), max(kcore_dict[qid1], kcore_dict[qid2]),
     min(centrality_dict[qid1], centrality_dict[qid2]), max(centrality_dict[qid1], centrality_dict[qid2]),
     min(closeness_dict[qid1], closeness_dict[qid2]), max(closeness_dict[qid1], closeness_dict[qid2]),
     min(cliques_dict[qid1], cliques_dict[qid2]), max(cliques_dict[qid1], cliques_dict[qid2]))
     for qid1, qid2 in zip(train_data.qid1, train_data.qid2)]), \
           np.array([(min(kcore_dict[qid1], kcore_dict[qid2]), max(kcore_dict[qid1], kcore_dict[qid2]),
     min(centrality_dict[qid1], centrality_dict[qid2]), max(centrality_dict[qid1], centrality_dict[qid2]),
     min(closeness_dict[qid1], closeness_dict[qid2]), max(closeness_dict[qid1], closeness_dict[qid2]),
     min(cliques_dict[qid1], cliques_dict[qid2]), max(cliques_dict[qid1], cliques_dict[qid2]))
     for qid1, qid2 in zip(test_data.qid1, test_data.qid2)])
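
A usage sketch with tiny hand-made frames; the qid values are purely illustrative and only show the expected qid1/qid2 layout that the function above consumes:

import numpy as np
import pandas as pd
import networkx as nx

train_data = pd.DataFrame({"qid1": [1, 2, 3], "qid2": [2, 3, 4]})
test_data = pd.DataFrame({"qid1": [4, 5], "qid2": [1, 3]})

train_feats, test_feats = graphical_features(train_data, test_data)
print(train_feats.shape, test_feats.shape)   # (3, 8) and (2, 8): min/max of k-core, degree, closeness, clique counts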
Example #5
 def find_cliques(self):
     ''' Function finds the number of cliques to which each vertex belongs. 
         Generates a separate list for each chain.'''
     self.cliques = []
     for chain in self.chains:
         # for each chain generate a dictionary of the number of cliques
         # to which each atom in the chain belongs
         self.cliques.append(nx.number_of_cliques(chain))
Example #6
 def find_cliques(self):
     """ Function finds the number of cliques to which each vertex belongs.
         Generates a separate list for each chain."""
     self.cliques = []
     for chain in self.chains:
         # for each chain generate a dictionary of the number of cliques 
         # to which each atom in the chain belongs
         self.cliques.append(nx.number_of_cliques(chain))
Example #7
    def nodes_number_of_cliques(self):
        """
        Parameters
        ----------

        Returns
        -------
        dict: number of maximal cliques for each node

        Examples
        --------
        >>>
        """
        if self.is_directed:
            return nx.number_of_cliques(self._graph.to_undirected())
        else:
            return nx.number_of_cliques(self._graph)
Example #8
 def hash_nxgraph(g: nx.Graph):
     t = nx.triangles(g)
     c = nx.number_of_cliques(g)
     ele = nx.get_node_attributes(g, 'symbol')
     dv = g.degree
     props = [(dv[v], t[v], c[v], ele[v]) for v in g]
     props.sort()
     s = str(props)
     return hashstr2int(s)
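
hashstr2int is not defined in this snippet; a hypothetical stand-in based on hashlib, plus a usage sketch on a graph whose nodes all carry the 'symbol' attribute the function expects, could look like:

import hashlib
import networkx as nx

def hashstr2int(s):
    # Hypothetical helper: stable integer digest of a string
    return int(hashlib.sha256(s.encode("utf-8")).hexdigest(), 16)

g = nx.complete_graph(3)
nx.set_node_attributes(g, {n: "C" for n in g}, "symbol")   # every node needs a 'symbol'
print(hash_nxgraph(g))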
Example #9
    def calculate_undir_centralities(self, G, U, suffix):
        ''' Calculate centralities in undirected graph and add them as node attributes '''

        nx.set_node_attributes(G, nx.degree_centrality(U),
                               'degree_%s' % suffix)
        nx.set_node_attributes(G, nx.node_clique_number(U),
                               'clique_number_%s' % suffix)
        nx.set_node_attributes(G, nx.number_of_cliques(U),
                               'num_of_cliques_%s' % suffix)
        return G
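
Outside the class, the same pattern of storing per-node clique counts as node attributes looks roughly like this (a sketch on a stock NetworkX graph; the attribute name is illustrative):

import networkx as nx

G = nx.karate_club_graph()          # undirected, so it can play both the G and U roles
nx.set_node_attributes(G, nx.number_of_cliques(G), "num_of_cliques_full")
print(G.nodes[0]["num_of_cliques_full"])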
Example #10
 def props_for_hash(self):
     """
     see https://stackoverflow.com/questions/46999771/
     use with caution...
     """
     t = nx.triangles(self.graph)
     c = nx.number_of_cliques(self.graph)
     ele = nx.get_node_attributes(self.graph, 'symbol')
     dv = self.graph.degree
     props = [(dv[v], t[v], c[v], ele[v]) for v in self.graph]
     props.sort()
     return props
Example #11
 def test_number_of_cliques(self):
     G = self.G
     assert nx.graph_number_of_cliques(G) == 5
     assert nx.graph_number_of_cliques(G, cliques=self.cl) == 5
     assert nx.number_of_cliques(G, 1) == 1
     assert list(nx.number_of_cliques(G, [1]).values()) == [1]
     assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2]
     assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2}
     assert nx.number_of_cliques(G, 2) == 2
     assert (nx.number_of_cliques(G) ==
             {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
              6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
     assert (nx.number_of_cliques(G, nodes=list(G)) ==
             {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
              6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
     assert (nx.number_of_cliques(G, nodes=[2, 3, 4]) ==
             {2: 2, 3: 1, 4: 2})
     assert (nx.number_of_cliques(G, cliques=self.cl) ==
             {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
              6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
     assert (nx.number_of_cliques(G, list(G), cliques=self.cl) ==
             {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
              6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
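
For orientation, the same calls on an ad-hoc graph (a triangle plus one pendant edge); this is an illustrative sketch, not part of the test suite:

import networkx as nx

G = nx.Graph([(1, 2), (2, 3), (1, 3), (3, 4)])   # maximal cliques: {1, 2, 3} and {3, 4}
print(nx.graph_number_of_cliques(G))             # 2
print(nx.number_of_cliques(G, 3))                # node 3 lies in both cliques -> 2
print(nx.number_of_cliques(G))                   # {1: 1, 2: 1, 3: 2, 4: 1}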
Example #12
 def test_number_of_cliques(self):
     G=self.G
     assert_equal(nx.graph_number_of_cliques(G),5)
     assert_equal(nx.graph_number_of_cliques(G,cliques=self.cl),5)
     assert_equal(nx.number_of_cliques(G,1),1)
     assert_equal(list(nx.number_of_cliques(G,[1]).values()),[1])
     assert_equal(list(nx.number_of_cliques(G,[1,2]).values()),[1, 2])
     assert_equal(nx.number_of_cliques(G,[1,2]),{1: 1, 2: 2})
     assert_equal(nx.number_of_cliques(G,2),2)
     assert_equal(nx.number_of_cliques(G),
                  {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
                   6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
     assert_equal(nx.number_of_cliques(G,nodes=G.nodes()),
                  {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
                   6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
     assert_equal(nx.number_of_cliques(G,nodes=[2,3,4]),
                  {2: 2, 3: 1, 4: 2})
     assert_equal(nx.number_of_cliques(G,cliques=self.cl),
                  {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
                   6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
     assert_equal(nx.number_of_cliques(G,G.nodes(),cliques=self.cl),
                  {1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
                   6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
Example #13
def cliques_strategy(G, num_seeds):
    ''' Picks the top nodes based on number of cliques per node

    Args:
        G --                the input graph
        num_seeds --        the number of seed nodes to select

    Returns: list of output nodes based on the cliques
    '''

    centralities_dict = nx.number_of_cliques(G)
    sorted_centralities = nlargest(num_seeds,
                                   centralities_dict.items(),
                                   key=operator.itemgetter(1))
    node_keys = [i[0] for i in sorted_centralities]
    return node_keys
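
A usage sketch; nlargest and operator.itemgetter come from the standard library (heapq and operator), matching how the function uses them, and karate_club_graph is a stock NetworkX graph:

import operator
from heapq import nlargest

import networkx as nx

G = nx.karate_club_graph()
print(cliques_strategy(G, 5))   # the five nodes that belong to the most maximal cliques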
Example #14
def compute_node_measures(ntwk, calculate_cliques=False):
    """
    These return node-based measures
    """
    iflogger.info('Computing node measures:')
    measures = {}
    iflogger.info('...Computing degree...')
    measures['degree'] = np.array(list(ntwk.degree().values()))
    iflogger.info('...Computing load centrality...')
    measures['load_centrality'] = np.array(
        list(nx.load_centrality(ntwk).values()))
    iflogger.info('...Computing betweenness centrality...')
    measures['betweenness_centrality'] = np.array(
        list(nx.betweenness_centrality(ntwk).values()))
    iflogger.info('...Computing degree centrality...')
    measures['degree_centrality'] = np.array(
        list(nx.degree_centrality(ntwk).values()))
    iflogger.info('...Computing closeness centrality...')
    measures['closeness_centrality'] = np.array(
        list(nx.closeness_centrality(ntwk).values()))
    #    iflogger.info('...Computing eigenvector centrality...')
    #    measures['eigenvector_centrality'] = np.array(nx.eigenvector_centrality(ntwk, max_iter=100000).values())
    iflogger.info('...Computing triangles...')
    measures['triangles'] = np.array(list(nx.triangles(ntwk).values()))
    iflogger.info('...Computing clustering...')
    measures['clustering'] = np.array(list(nx.clustering(ntwk).values()))
    iflogger.info('...Computing k-core number')
    measures['core_number'] = np.array(list(nx.core_number(ntwk).values()))
    iflogger.info('...Identifying network isolates...')
    isolate_list = nx.isolates(ntwk)
    binarized = np.zeros((ntwk.number_of_nodes(), 1))
    for value in isolate_list:
        value = value - 1  # Zero indexing
        binarized[value] = 1
    measures['isolates'] = binarized
    if calculate_cliques:
        iflogger.info('...Calculating node clique number')
        measures['node_clique_number'] = np.array(
            list(nx.node_clique_number(ntwk).values()))
        iflogger.info('...Computing number of cliques for each node...')
        measures['number_of_cliques'] = np.array(
            list(nx.number_of_cliques(ntwk).values()))
    return measures
Example #15
def compute_node_measures(ntwk, calculate_cliques=False):
    """
    These return node-based measures
    """
    iflogger.info('Computing node measures:')
    measures = {}
    iflogger.info('...Computing degree...')
    measures['degree'] = np.array(list(ntwk.degree().values()))
    iflogger.info('...Computing load centrality...')
    measures['load_centrality'] = np.array(
        list(nx.load_centrality(ntwk).values()))
    iflogger.info('...Computing betweenness centrality...')
    measures['betweenness_centrality'] = np.array(
        list(nx.betweenness_centrality(ntwk).values()))
    iflogger.info('...Computing degree centrality...')
    measures['degree_centrality'] = np.array(
        list(nx.degree_centrality(ntwk).values()))
    iflogger.info('...Computing closeness centrality...')
    measures['closeness_centrality'] = np.array(
        list(nx.closeness_centrality(ntwk).values()))
    #    iflogger.info('...Computing eigenvector centrality...')
    #    measures['eigenvector_centrality'] = np.array(nx.eigenvector_centrality(ntwk, max_iter=100000).values())
    iflogger.info('...Computing triangles...')
    measures['triangles'] = np.array(list(nx.triangles(ntwk).values()))
    iflogger.info('...Computing clustering...')
    measures['clustering'] = np.array(list(nx.clustering(ntwk).values()))
    iflogger.info('...Computing k-core number')
    measures['core_number'] = np.array(list(nx.core_number(ntwk).values()))
    iflogger.info('...Identifying network isolates...')
    isolate_list = nx.isolates(ntwk)
    binarized = np.zeros((ntwk.number_of_nodes(), 1))
    for value in isolate_list:
        value = value - 1  # Zero indexing
        binarized[value] = 1
    measures['isolates'] = binarized
    if calculate_cliques:
        iflogger.info('...Calculating node clique number')
        measures['node_clique_number'] = np.array(
            list(nx.node_clique_number(ntwk).values()))
        iflogger.info('...Computing number of cliques for each node...')
        measures['number_of_cliques'] = np.array(
            list(nx.number_of_cliques(ntwk).values()))
    return measures
Example #16
print str(" ")

print 'Ο αριθμός κλίκας (το μέγεθος της μεγαλύτερης κλίκας) του G είναι:', nx.graph_clique_number(
    G)
# print 'The clique number (size of the largest clique) for G is:', nx.graph_clique_number(G)
# print sorted(nx.connected_components(G), key = len, reverse=True)
print str(" ")

print 'Το λεξικό των κλικών που περιέχουν κάθε κόμβο είναι:'
# print 'The dictionary of the lists of cliques containing each node:'
print nx.cliques_containing_node(G)
print str(" ")

print 'Το λεξικό του πλήθους κλικών που περιέχουν κάθε κόμβο είναι:'
# print 'The dictionary of the numbers of maximal cliques for each node:'
print nx.number_of_cliques(G)
print str(" ")

print 'Το λεξικό του μεγέθους των μεγαλύτερων κλικών που περιέχουν κάθε κόμβο είναι:'
# print 'The dictionary of the sizes of the largest maximal cliques containing each given node:'
print nx.node_clique_number(G)
print str(" ")

maxclique = [
    clq for clq in nx.find_cliques(G) if len(clq) == nx.graph_clique_number(G)
]
nodes = [n for clq in maxclique for n in clq]
H = G.subgraph(nodes)
# print H.edges()

#### PLOTTING CLIQUES INSIDE ENCLOSING COLORED REGIONS
Example #17
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import networkx as nx

# options: https://networkx.github.io/documentation/stable/reference/index.html

G = nx.connected_caveman_graph(4, 7)
print(nx.number_of_cliques(G))
for clique in nx.find_cliques(G):
    print(clique)
print("\n")

print(nx.degree_centrality(G))
print("\n")
print(nx.edge_betweenness_centrality(G))
print("\n")

degree_sequence = sorted((d for _, d in nx.degree(G)), reverse=True)  # nx.degree(G) yields (node, degree) pairs in NetworkX 2.x, not a dict
print(degree_sequence)
Example #18
# -*- coding: utf-8 -*-
"""
AFRS - Trabalho 4

Author: Gonçalo Peres
Date: 2019/02/02
"""

import networkx as nx

g = nx.read_gml('dolphins.gml')

clique = nx.number_of_cliques(g)

print(clique)
Example #19
@author: Administrator
"""

import networkx as nx 
import matplotlib.pyplot as plt 
from nx_graph_build import build_G
import community
#G = nx.read_gml(r"C:\Users\Administrator\Desktop\dolphins.gml")
G = build_G()

print(nx.number_of_nodes(G))
print(nx.number_of_edges(G))

# Cliques: nx.number_of_cliques(G, nodes='TR77') returns how many maximal cliques that node belongs to.
# nx.number_of_cliques(G) returns this count for every node, as a {node: num} dict.
print(nx.number_of_cliques(G, nodes = 'TR77'))
# list of k-clique communities in the network; each element contains the nodes that make up the community.
klist = list(nx.community.k_clique_communities(G,3))
print(len(klist))
# nx.community.k_clique_communities(G, k): the k nodes of each clique must be pairwise connected; overlapping cliques merge into communities.

# Varying k changes the number of communities found:
#for k in range(3,6):
#    klist_new = list(nx.community.k_clique_communities(G,k))
#    print(len(klist_new))

# Remove the nodes that fall inside a community, keeping those that belong to none
aa = list(G.nodes())
list_1 = []
for item in klist:
    list_1.extend(item)
	print "Compute number of maximal ciiques"
	print "-------------------------------------"
	graphNumberOfCliques = nx.graph_number_of_cliques(G, cliques)
	print graphNumberOfCliques


	print "-------------------------------------"
	print "Compute size of largest maximal clique containing a given node"
	print "-------------------------------------"
	maximalCliqueSizePerNode = nx.node_clique_number(G)
	print maximalCliqueSizePerNode

	print "-------------------------------------"
	print "Compute number of maximal cliques for each node"
	print "-------------------------------------"
	noOfMaximalCliquesPerNode = nx.number_of_cliques(G)
	print noOfMaximalCliquesPerNode

	print "-------------------------------------"
	print "Compute list of cliques containing  a given node"
	print "-------------------------------------"
	lcliques = nx.cliques_containing_node(G)
	print lcliques

	print "-------------------------------------"
	print "Writing data into global file"
	print "-------------------------------------"

	globalCliqueFile = 'data/globalCliqueFile.csv'
	mode = '';
	if os.path.isfile(globalCliqueFile):
Example #21
    G.add_edges_from(edgeTuples)

    return G


#Read in data
nodesOne = pd.read_csv('data/got-s1-nodes.csv', low_memory=False)
edgesOne = pd.read_csv('data/got-s1-edges.csv', low_memory=False)

G = create_graph(nodesOne, edgesOne)

nx.degree(G)

nx.modularity_matrix(G)

nx.density(G)

list(nx.find_cliques(G))
nx.number_of_cliques(G)

nx.clustering(G)

nx.eigenvector_centrality(G)

nx.number_of_isolates(G)
nx.isolates(G)

nx.pagerank(G)

nx.shortest_path(G)
Example #22
print lvl2
print str(" ")

print "Ο αριθμός κλίκας (το μέγεθος της μεγαλύτερης κλίκας) του G είναι:", nx.graph_clique_number(G)
# print 'The clique number (size of the largest clique) for G is:', nx.graph_clique_number(G)
# print sorted(nx.connected_components(G), key = len, reverse=True)
print str(" ")

print "Το λεξικό των κλικών που περιέχουν κάθε κόμβο είναι:"
# print 'The dictionary of the lists of cliques containing each node:'
print nx.cliques_containing_node(G)
print str(" ")

print "Το λεξικό του πλήθους κλικών που περιέχουν κάθε κόμβο είναι:"
# print 'The dictionary of the numbers of maximal cliques for each node:'
print nx.number_of_cliques(G)
print str(" ")

print "Το λεξικό του μεγέθους των μεγαλύτερων κλικών που περιέχουν κάθε κόμβο είναι:"
# print 'The dictionary of the sizes of the largest maximal cliques containing each given node:'
print nx.node_clique_number(G)
print str(" ")

maxclique = [clq for clq in nx.find_cliques(G) if len(clq) == nx.graph_clique_number(G)]
nodes = [n for clq in maxclique for n in clq]
H = G.subgraph(nodes)
# print H.edges()


#### PLOTTING CLIQUES INSIDE ENCLOSING COLORED REGIONS
Example #23
def get_graph(Mat_D, Threshold, percentageConnections=False, complet=False):
    import scipy.io as sio
    import numpy as np
    import networkx as nx
    import pandas as pd
    import os
    Data = sio.loadmat(Mat_D)
    matX = Data['Correlation']  #[:tamn,:tamn]
    labels = Data['labels']
    print(np.shape(matX))
    print(np.shape(labels))
    print(np.min(matX), np.max(matX))

    if percentageConnections:
        if percentageConnections > 0 and percentageConnections < 1:
            for i in range(-100, 100):
                per = np.sum(matX > i / 100.) / np.size(matX)
                if per <= Threshold:
                    Threshold = i / 100.
                    break
            print(Threshold)
        else:
            print('The coefficient is outside the valid range')

    # Edge list for the graph
    row, col = np.shape(matX)
    e = []
    for i in range(1, row):
        for j in range(i):
            if complet:
                e.append((labels[i], labels[j], matX[i, j]))
            else:
                if matX[i, j] > Threshold:
                    e.append((labels[i], labels[j], matX[i, j]))

    print(np.shape(e)[0], int(((row - 1) * row) / 2))

    # Build the graph
    G = nx.Graph()
    G.add_weighted_edges_from(e)
    labelNew = list(G.nodes)

    # Per-graph metrics (weighted)
    Dpc = nx.degree_pearson_correlation_coefficient(G, weight='weight')
    cluster = nx.average_clustering(G, weight='weight')

    # Unweighted
    estra = nx.estrada_index(G)
    tnsity = nx.transitivity(G)
    conNo = nx.average_node_connectivity(G)
    ac = nx.degree_assortativity_coefficient(G)

    # Per-node metrics
    tam = 15
    BoolCenV = False
    BoolLoad = False
    alpha = 0.1
    beta = 1.0

    katxCN = nx.katz_centrality_numpy(G,
                                      alpha=alpha,
                                      beta=beta,
                                      weight='weight')
    bcen = nx.betweenness_centrality(G, weight='weight')
    av_nd = nx.average_neighbor_degree(G, weight='weight')
    ctr = nx.clustering(G, weight='weight')
    ranPaN = nx.pagerank_numpy(G, weight='weight')
    Gol_N = nx.hits_numpy(G)
    Dgc = nx.degree_centrality(G)
    cl_ce = nx.closeness_centrality(G)
    cluster_Sq = nx.square_clustering(G)
    centr = nx.core_number(G)
    cami = nx.node_clique_number(G)
    camiN = nx.number_of_cliques(G)
    trian = nx.triangles(G)
    colorG = nx.greedy_color(G)
    try:
        cenVNum = nx.eigenvector_centrality_numpy(G, weight='weight')
        tam = tam + 1
        BoolCenV = True
    except TypeError:
        print(
            "The network is too small to compute this parameter")
    except:
        print('NetworkXPointlessConcept: graph null')
    if Threshold > 0:
        carga_cen = nx.load_centrality(G, weight='weight')  # Positive weights
        BoolLoad = True
        tam = tam + 1
    #katxC=nx.katz_centrality(G, alpha=alpha, beta=beta, weight='weight')
    #cenV=nx.eigenvector_centrality(G,weight='weight')
    #cenV=nx.eigenvector_centrality(G,weight='weight')
    #Golp=nx.hits(G)
    #Gol_si=nx.hits_scipy(G)
    #ranPa=nx.pagerank(G, weight='weight')
    #ranPaS=nx.pagerank_scipy(G, weight='weight')

    matrix_datos = np.zeros((tam, np.shape(labelNew)[0]))
    tam = 15
    print(np.shape(matrix_datos))
    lim = np.shape(labelNew)[0]
    for i in range(lim):
        roi = labelNew[i]
        #print(roi)
        matrix_datos[0, i] = katxCN[roi]
        matrix_datos[1, i] = bcen[roi]
        matrix_datos[2, i] = av_nd[roi]
        matrix_datos[3, i] = ctr[roi]
        matrix_datos[4, i] = ranPaN[roi]
        matrix_datos[5, i] = Gol_N[0][roi]
        matrix_datos[6, i] = Gol_N[1][roi]
        matrix_datos[7, i] = Dgc[roi]
        matrix_datos[8, i] = cl_ce[roi]
        matrix_datos[9, i] = cluster_Sq[roi]
        matrix_datos[10, i] = centr[roi]
        matrix_datos[11, i] = cami[roi]
        matrix_datos[12, i] = camiN[roi]
        matrix_datos[13, i] = trian[roi]
        matrix_datos[14, i] = colorG[roi]
        if BoolCenV:
            matrix_datos[15, i] = cenVNum[roi]
            tam = tam + 1
        if BoolLoad:
            matrix_datos[16, i] = carga_cen[roi]
            tam = tam + 1
        #matrix_datos[0,i]=katxC[roi]
        #matrix_datos[2,i]=cenV[roi]
        #matrix_datos[7,i]=Golp[0][roi]
        #matrix_datos[9,i]=Gol_si[0][roi]
        #matrix_datos[10,i]=Golp[1][roi]
        #matrix_datos[12,i]=Gol_si[1][roi]
        #matrix_datos[22,i]=ranPa[roi]
        #matrix_datos[24,i]=ranPaS[roi]
    FuncName = [
        'degree_pearson_correlation_coefficient', 'average_clustering',
        'estrada_index', 'transitivity', 'average_node_connectivity',
        'degree_assortativity_coefficient', 'katz_centrality_numpy',
        'betweenness_centrality', 'average_neighbor_degree', 'clustering',
        'pagerank_numpy', 'hits_numpy0', 'hits_numpy1', 'degree_centrality',
        'closeness_centrality', 'square_clustering', 'core_number',
        'node_clique_number', 'number_of_cliques', 'triangles', 'greedy_color',
        'eigenvector_centrality_numpy', 'load_centrality'
    ]
    frame = pd.DataFrame(matrix_datos)
    frame.columns = labelNew
    frame.index = FuncName[6:tam]

    Resul = os.getcwd()
    out_data = Resul + '/graph_metrics.csv'
    out_mat = Resul + '/graph_metrics_global.mat'

    frame.to_csv(out_data)
    sio.savemat(
        out_mat, {
            FuncName[0]: Dpc,
            FuncName[1]: cluster,
            FuncName[2]: estra,
            FuncName[3]: tnsity,
            FuncName[4]: conNo,
            FuncName[5]: ac
        })
    return out_data, out_mat
Example #24
print 'loading edges'

for i in temp1:
    G.add_edge(i[0], i[1])

print 'loading nodes'
for i in temp2:
    G.add_node(i[0])

print 'calc core number'
cn = nx.core_number(G)
print 'calc clique number'
kn = nx.node_clique_number(G)
print 'calc number of cliques'
kn2 = nx.number_of_cliques(G)
print 'calculating node  measures'
for u, e in G.nodes_iter(data=True):

    dr = str(nx.degree(G, u))
    cl = str(nx.clustering(G, u))
    #cc = str(nx.betweenness_centrality(G)[u])
    ct = cn[u]
    knu = kn[u]
    kn2u = kn2[u]
    #ks = str(nx.k_shell(G)[u])
    f3.write(u + " " + dr + " " + cl + " " + str(ct) + " " + str(knu) + " " +
             str(kn2u) + "\n")
f3.close()

print 'calc edge measures'
Example #25
    (i.to_undirected().number_of_edges() * 2 /i.to_undirected().number_of_nodes()) ** 2)

    print 'Average number of neighbors'
    print sum(nx.average_neighbor_degree(i.to_undirected()).values())

    print 'Nodes'
    print nx.number_of_nodes(i.to_undirected())

    print 'edges'
    print nx.number_of_edges(i.to_undirected())

    print 'Density'
    print nx.density(trem_nets.to_undirected())

    print 'Number of cliques'
    print nx.graph_number_of_cliques(i.to_undirected())  # len() of the per-node dict would count nodes, not maximal cliques

    print 'Average Degree'
    print 2.0 * (len(i.to_undirected().edges())) / len(i.to_undirected().nodes())

    print 'Average Degree Assortativity'
    print nx.degree_assortativity_coefficient(i.to_undirected())

# things for only connected graphs
ddi = max(nx.connected_component_subgraphs(ddi_nets.to_undirected()), key=len)
print 'Network Diameter'
nx.diameter(ddi.to_undirected())

print 'Shortest paths (sum)'
sum(nx.all_pairs_shortest_path_length(ddi_owl.to_undirected()).values()[0].values())
Example #26
        for articles in db.view('_all_docs'):  ## search for quoter among the authors
            j=articles['id']
            if db[j]["Author"] == quoter:    ##quoter is an author in database
                
                for quot in db[j]["Quoters"]:  # search for the author among the quoters of quoter
                    if quot == Aut:
                        H.add_node(quot)
                        H.add_node(Aut)
                        H.add_edge(Aut, quoter)

        
nx.draw(H,pos=nx.spring_layout(H))


NumOfCliqes=nx.graph_clique_number(H)
print ("Clique number of the graph : ")
print (NumOfCliqes)

#
MaxCliques = nx.find_cliques(H)
print ("All maximal cliques: ")
print(list(MaxCliques))
##
node_clique_number=nx.node_clique_number(H)
print ("Size of the largest maximal clique containing each given node")
print (node_clique_number)

number_of_cliques=nx.number_of_cliques(H)
print ("Number of maximal cliques for each node.")
print (number_of_cliques)