Example no. 1
 def test_multiGraph(self):
     G = metaknowledge.diffusionGraph(self.RC, self.RC, labelEdgesBy='PY')
     metaknowledge.dropEdges(G, dropSelfLoops=True)
     # multigraph edge counts are somewhat unpredictable, so edges are left out of the checked stats
     self.assertEqual(
         metaknowledge.graphStats(G,
                                  stats=('nodes', 'isolates', 'loops'),
                                  sentenceString=True),
         'The graph has 42 nodes, 0 isolates and 0 self loops')
 def test_dropEdges(self):
     metaknowledge.dropEdges(self.G,
                             minWeight=1,
                             maxWeight=3,
                             dropSelfLoops=True)
     self.assertEqual(
         metaknowledge.graphStats(self.G, sentenceString=True),
         "The graph has 493 nodes, 12711 edges, 0 isolates, 0 self loops, a density of 0.104809 and a transitivity of 0.588968"
     )
     self.assertTrue(self.G.edge['Imbert C, 1975, NOUV REV OPT']
                     ['Fainman Y, 1984, APPL OPTICS']['weight'] == 1)
def getThresholds(clargs, grph):
    thresDict = collections.OrderedDict([
    ('0', "Continue"),
    ('1', "Drop isolates"),
    ('2', "Remove self loops"),
    ('3', "Remove edges below some weight"),
    ('4', "Remove edges above some weight"),
    ('5', "Remove nodes below some degree"),
    ('6', "Remove nodes above some degree"),
    ])
    print("The network contains {0} nodes and {1} edges, of which {2} are isolated and {3} are self loops.".format(len(grph.nodes()), len(grph.edges()), len(nx.isolates(grph)), len(grph.selfloop_edges())))
    thresID = int(inputMenu(thresDict, header = "What type of filtering do you want? "))
    if thresID == 0:
        return grph
    elif thresID == 1:
        metaknowledge.dropNodesByDegree(grph, minDegree = 1)
        return getThresholds(clargs, grph)
    elif thresID == 2:
        metaknowledge.dropEdges(grph, dropSelfLoops = True)
        return getThresholds(clargs, grph)
    elif thresID == 3:
        metaknowledge.dropEdges(grph, minWeight = getNum("What is the minimum weight for an edge to be included? "))
        return getThresholds(clargs, grph)
    elif thresID == 4:
        metaknowledge.dropEdges(grph, maxWeight = getNum("What is the maximum weight for an edge to be included? "))
        return getThresholds(clargs, grph)
    elif thresID == 5:
        metaknowledge.dropNodesByDegree(grph, minDegree = getNum("What is the minimum degree for a node to be included? "))
        return getThresholds(clargs, grph)
    else:
        metaknowledge.dropNodesByDegree(grph, maxDegree = getNum("What is the maximum degree for a node to be included? "))
        return getThresholds(clargs, grph)
Example no. 6
# Generating the co-author network
coauth_net = RC.networkCoAuthor()
coauth_net

# In[ ]:

# Printing the network stats
print(mk.graphStats(coauth_net))

# There are 10104 nodes (authors) in the network who are connected by 15507 edges. Of these authors, 1111 are isolates (unconnected to others).
#
# Next, we will drop self-loops and any edges with a weight below 2, i.e. pairs of authors who have co-authored fewer than two papers together. For our analysis we will extract the "giant component", which is the largest subgraph of connected nodes in a network graph. The giant component typically contains a significant proportion of the nodes in the network. We'll use Python's networkx package for this and subsequent tasks.

# In[ ]:

mk.dropEdges(coauth_net, minWeight=2, dropSelfLoops=True)
giant_coauth = max(nx.connected_component_subgraphs(coauth_net), key=len)
print(mk.graphStats(giant_coauth))
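
# In[ ]:

# If you are running networkx 2.4 or later, connected_component_subgraphs is no longer
# available; a minimal equivalent sketch (using the same coauth_net) that builds the
# giant component from nx.connected_components instead:
giant_coauth = coauth_net.subgraph(max(nx.connected_components(coauth_net), key=len)).copy()
print(mk.graphStats(giant_coauth))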

# We are left with 265 authors, each of whom retains at least one co-authorship tie of weight 2 or more. We can see the graph density has gone up because of our filtering criteria.
#
# Centrality is a key concept in network analysis. The degree, closeness, betweenness and eigenvector centralities tell us which nodes (authors) are the most important. These are calculated from the number of links to other nodes, the length of the paths to other nodes, the number of times the node acts as a bridge along the shortest path between other nodes, and the relative influence of the node, respectively.
#
# Let's compute the centrality scores in our co-author graph.

# In[ ]:

# Computing centrality scores
deg = nx.degree_centrality(giant_coauth)
clo = nx.closeness_centrality(giant_coauth)
bet = nx.betweenness_centrality(giant_coauth)
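
# In[ ]:

# The centrality discussion above also mentions eigenvector centrality; a minimal sketch
# (the names eig and top_degree are just illustrative) that computes it and prints the
# ten authors with the highest degree centrality alongside their eigenvector scores:
eig = nx.eigenvector_centrality(giant_coauth)
top_degree = sorted(deg.items(), key=lambda kv: kv[1], reverse=True)[:10]
for author, score in top_degree:
    print('{0}: degree {1:.3f}, eigenvector {2:.3f}'.format(author, score, eig[author]))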
 def test_dropEdges(self):
     metaknowledge.dropEdges(self.G,
                             minWeight=1,
                             maxWeight=3,
                             dropSelfLoops=True)
     self.assertEqual(
         metaknowledge.graphStats(self.G),
         "The graph has 492 nodes, 12660 edges, 0 isolates, 0 self loops, a density of 0.104813 and a transitivity of 0.58952")
     self.assertTrue(self.G.edge['Imbert C, 1975, NOUV REV OPT']
                     ['Fainman Y, 1984, APPL OPTICS']['weight'] == 1)