Example No. 1
def main():
    print("Default number of threads:", nk.getCurrentNumberOfThreads())
    num_threads = 16
    nk.setNumberOfThreads(64)
    print("Updated number of threads:", nk.getCurrentNumberOfThreads())
    #graph = load_from_graphtool('/home/crossb/packaged_ci/graphs/2020/Right_news_2020_ci.gt')
    graphs_2020 = load_graphs_gt('/home/crossb/packaged_ci/graphs/2020/', year='2020')
    graphs_2016 = load_graphs_gt('/home/crossb/packaged_ci/graphs/2016/', year='2016')
    #anti_trump(graphs_2020, num_threads)

    delta_df = influence_gain(graphs_2016, graphs_2020, N=20)
    #influencer_network_anlaysis(year=2016)
    #influencer_network_anlaysis(year=2020)
    #all_network_stats()
    #for bias, path in BIAS_TO_RETWEET_NETWORKS.items():
    #    graph, node_to_uid = load_from_edgelist(path, directed=True)
    #    #hub_nodes = hub_analysis(graph, node_to_uid)
    #    network_stats[bias] = network_characteristics(graph)
    #    # create top influencer network
    #    #political_leaning_time_series_analysis()

    return
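The thread-control calls used above (and throughout the examples below) come straight from NetworKit's top-level API; a minimal standalone sketch:

import networkit as nk

print("current:", nk.getCurrentNumberOfThreads())  # threads NetworKit currently uses
print("max:", nk.getMaxNumberOfThreads())          # upper bound reported by OpenMP
nk.setNumberOfThreads(8)                           # request 8 threads for subsequent algorithms
print("updated:", nk.getCurrentNumberOfThreads())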
Example No. 2
def __init__(self, graph_lib, num_threads=None):
    self.graph_lib = graph_lib  # 'igraph', 'networkx', 'networkit'
    if graph_lib == "networkit":
        self.num_threads = (
            num_threads if num_threads is not None and num_threads != -1
            else nk.getMaxNumberOfThreads()
        )  # OpenMP threads only for `networkit`.
        nk.setNumberOfThreads(self.num_threads)
    elif num_threads is not None:
        raise ValueError(
            '`num_threads` is only used for the `networkit` library.')
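A usage sketch for this constructor, assuming it belongs to a small backend-selection wrapper; the class name GraphBackend below is hypothetical and only stands in for whatever class defines the method:

import networkit as nk

class GraphBackend:
    # Hypothetical host class reproducing the __init__ shown above.
    def __init__(self, graph_lib, num_threads=None):
        self.graph_lib = graph_lib
        if graph_lib == "networkit":
            self.num_threads = (
                num_threads if num_threads is not None and num_threads != -1
                else nk.getMaxNumberOfThreads()
            )
            nk.setNumberOfThreads(self.num_threads)
        elif num_threads is not None:
            raise ValueError('`num_threads` is only used for the `networkit` library.')

GraphBackend("networkit", num_threads=4)   # pins NetworKit to 4 OpenMP threads
GraphBackend("networkit", num_threads=-1)  # -1 falls back to nk.getMaxNumberOfThreads()
GraphBackend("networkx")                   # num_threads must stay None for other libraries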
Example No. 3
File: init.py  Project: Udopia/gbd
def init_networkit_features(api: GBD, query, hashes):
    try:
        import networkit as nk
    except ImportError as e:
        raise GBDException(
            "Module 'networkit' not found. Setup https://networkit.github.io/") from e
    nk.setNumberOfThreads(min(multiprocessing.cpu_count(), api.jobs))
    resultset = api.query_search(query, hashes, ["local"], collapse="MIN")
    for (hash, local) in resultset:
        result = networkit_features(hash, local, {})
        eprint(result['hashvalue'])
        for att in result['attributes']:
            eprint(att[1] + "=" + str(att[2]))
Example No. 4
def main():
    nk.setNumberOfThreads(4)
    name = 'Yeast1'
    largestComp('TextFiles/' + str(name) + '.txt')
    plt.grid(True)
    # per, dat_dc, dat_bc, dat_cc, dat_ev, dat_random and BEGIN are module-level
    # variables, presumably populated by largestComp().
    plt.plot(per, dat_dc[BEGIN:], label='Degree centrality', color='#000000', linestyle='solid')
    plt.plot(per, dat_bc[BEGIN:], label='Betweenness centrality', color='#000000', linestyle='dashed')
    plt.plot(per, dat_cc[BEGIN:], label='Closeness centrality', color='#8a8686', linestyle='solid')
    plt.plot(per, dat_ev[BEGIN:], label='Eigenvector centrality', color='#8a8686', linestyle='dashed')
    plt.plot(per, dat_random[BEGIN:], label='Random', color='#d9d4d4', linestyle='solid')

    plt.title('Харьцуулалт (' + str(name) + ')')  # Mongolian: "Comparison (<name>)"
    plt.xlabel('Устгасан оройн тоо (Хувиар)')  # "Number of removed vertices (in percent)"
    plt.ylabel('Хамгийн том компонентийн хэмжээ')  # "Size of the largest component"
    plt.legend()
    plt.savefig('NetworkDismantlingFigure/' + str(name) + '.png')
    plt.show()
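The plot compares how quickly the largest connected component shrinks as vertices are removed in different centrality orders. A minimal sketch of producing one such removal order with NetworKit (the helper name degree_removal_order is illustrative, not from the original script):

import networkit as nk

def degree_removal_order(G):
    """Return nodes ranked by degree centrality, highest first."""
    dc = nk.centrality.DegreeCentrality(G)
    dc.run()
    return [node for node, _score in dc.ranking()]  # ranking() is sorted by score, descending

# Tiny demo graph: a path 0-1-2-3 plus an extra edge that makes node 1 the hub.
G = nk.Graph(4)
for u, v in [(0, 1), (1, 2), (2, 3), (1, 3)]:
    G.addEdge(u, v)
print(degree_removal_order(G))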
Example No. 5
# (Tail of a networkx drawing call; the opening of the call is not shown in this excerpt.)
        node_color=color_map,
        font_color="black",
        edge_cmap=plt.get_cmap('BuGn'),
        label="SNP To Gene eQTL Associations Cis & Trans")
# Optionally draw a circular plot in which edges are colored blue when the degree of their
# source node meets the command-line threshold (sys.argv[1]), and red otherwise:
#nx.draw_circular(B,with_labels=True, edge_color=['blue' if B.degree[e[0]] >= int(sys.argv[1]) else 'red' for e in B.edges], node_color=color_map, font_color="black",edge_cmap=plt.get_cmap('Blues'), label ="SNP To Gene eQTL Associations Cis & Trans" )
#plt.title("SNP to Gene eQTL Association")
plt.title('ReGen Bipartite Plot', color='magenta')
#plt.show()
print("Drawing for Circular Plot prepared")
plt.savefig(r'{}/abiPlot.png'.format(sys.argv[6]), bbox_inches='tight')
print("Graph Plotted by name abiPlot.png")
#plt.show()
plt.clf()  #Clear the figure
#################################### Community detection using NetworKit's parallel Louvain algorithm (PLM) ####################################
nk.setNumberOfThreads(int(sys.argv[5]))  # number of OpenMP threads
nkG = nk.nxadapter.nx2nk(B, weightAttr=None)  # nkG is the graph converted to NetworKit format
communities = nk.community.detectCommunities(nkG)

#nxG = nk.nxadapter.nk2nx(communities)

print(nk.community.Modularity().getQuality(communities, nkG))

#Write the community partitioning
nk.community.writeCommunities(
    communities, r"./{}/communities.partition".format(sys.argv[6]))
#Plot the communities (remove these lines if the plot is not needed)
nk.viztasks.drawCommunityGraph(nkG, communities)
plt.savefig(r'{}/communityPlot.png'.format(sys.argv[6]), bbox_inches='tight')
print("Communities Plotted by name communityPlot.png")
Example No. 6
    def create(cls, G, preset="default", config=None):
        """ creates a profile object

		Args:
			G: graph to profile
			preset: name of preset configuration: "complete", "minimal", "default"
			config: object to control some aspects of the generation behaviour (Config)
		Returns:
			profile object
		"""

        # if no custom config is given, use a preconfigured config according to preset name
        if not config:
            config = Config.createConfig(preset)

        result = cls(G, cls.__TOKEN)
        # TODO: use copy constructor instead
        result.__config = config

        kit.setNumberOfThreads(result.__parallel)

        def funcScores(instance):
            """ returns node scores """
            return instance.scores()

        def funcSizes(instance):
            """ returns partition subset sizes """
            return sorted(instance.getPartition().subsetSizes())

        if G.isDirected():
            classConnectedComponents = components.StronglyConnectedComponents
        else:
            classConnectedComponents = components.ConnectedComponents

        # internal unique name | category name | display name |
        # compute correlation within same category | value function for measures | display name (axis) | class name of measure | parameter of constructor
        for parameter in [
            ("Centrality.Degree", "Node Centrality", "Degree", True,
             funcScores, "Score", centrality.DegreeCentrality, (G, )),
            ("Centrality.CoreDecomposition", "Node Centrality",
             "k-Core Decomposition", True, funcScores, "Score",
             centrality.CoreDecomposition, (G, )),
            ("Centrality.ClusteringCoefficient", "Node Centrality",
             "Local Clustering Coefficient", True, funcScores, "Score",
             centrality.LocalClusteringCoefficient, (G, )),
            ("Centrality.PageRank", "Node Centrality", "PageRank", True,
             funcScores, "Score", centrality.PageRank, (G, )),
            ("Centrality.KPath", "Node Centrality", "k-Path Centrality", True,
             funcScores, "Score", centrality.KPathCentrality, (G, )),
            ("Centrality.Katz", "Node Centrality", "Katz Centrality", True,
             funcScores, "Score", centrality.KatzCentrality, (G, )),
            ("Centrality.Betweenness", "Node Centrality", "Betweenness", True,
             funcScores, "Score", centrality.ApproxBetweenness2, (G, 10,
                                                                  True)),
            ("Centrality.Closeness", "Node Centrality", "Closeness", True,
             funcScores, "Score", centrality.ApproxCloseness, (G, 10, True)),
            ("Partition.Communities", "Partition", "Communities", False,
             funcSizes, "Nodes per Community", community.PLM, (G, )),
            ("Partition.ConnectedComponents", "Partition",
             "Connected Components", False, funcSizes, "Nodes per Component",
             classConnectedComponents, (G, )),
            ("Partition.CoreDecomposition", "Partition",
             "k-Core Decomposition", False, funcSizes, "Nodes per Shell",
             centrality.CoreDecomposition, (G, ))
        ]:
            result.__addMeasure(parameter)

        if cls.__verbose:
            timerAll = stopwatch.Timer()
        result.__loadProperties()
        result.__loadMeasures()
        if cls.__verbose:
            if cls.__verboseLevel < 1:
                print("")
            print("\ntotal time (measures + stats + correlations): {:.2F} s".
                  format(timerAll.elapsed))
            print("total speed: {:.1F} edges/s".format(G.numberOfEdges() /
                                                       timerAll.elapsed))
        return result
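For readers unfamiliar with how such a measure tuple is consumed, a small illustrative sketch (the unpacking code is not the profiling module's internals; it reuses G, funcScores and centrality from the method above):

# Tuple layout: internal name | category | display name | correlate-within-category |
# value function | axis label | measure class | constructor arguments
entry = ("Centrality.Degree", "Node Centrality", "Degree", True,
         funcScores, "Score", centrality.DegreeCentrality, (G, ))

measureClass, args = entry[6], entry[7]
instance = measureClass(*args)   # e.g. centrality.DegreeCentrality(G)
instance.run()
values = entry[4](instance)      # funcScores -> instance.scores()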
Example No. 7
	def run(self, G):
		# Temporarily force a single-threaded run, then restore the previous setting.
		mt = networkit.getMaxNumberOfThreads()
		networkit.setNumberOfThreads(1)
		bc = networkit.centrality.ApproxBetweenness2(G, nSamples=42)
		bc.run()
		networkit.setNumberOfThreads(mt)
Example No. 8
	def run(self, G):
		mt = networkit.getMaxNumberOfThreads()
		networkit.setNumberOfThreads(1)
		bc = networkit.centrality.Betweenness(G)
		bc.run()
		networkit.setNumberOfThreads(mt)
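Both run methods above restore the previous thread count only if the centrality computation succeeds. A slightly more defensive variant of the same pattern (a sketch, not taken from either source) restores it in a finally block:

	def run(self, G):
		mt = networkit.getMaxNumberOfThreads()
		networkit.setNumberOfThreads(1)
		try:
			bc = networkit.centrality.Betweenness(G)
			bc.run()
		finally:
			networkit.setNumberOfThreads(mt)  # restore even if run() raises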
Example No. 9
print("#LOG# program_run/0/total_workers: {}".format(16))
print("#LOG# program_run/0/workers_per_host: {}".format(16))
print("#LOG# program_run/0/graph: {}".format(sys.argv[1]))

if sys.argv[1].endswith('.bin'):
    g = graphio.ThrillGraphBinaryReader().read(sorted(glob(sys.argv[1])))
else:
    g = graphio.METISGraphReader().read(sys.argv[1])

print("#LOG# program_run/0/node_count: {}".format(g.numberOfNodes()))
print("#LOG# program_run/0/edge_count: {}".format(g.numberOfEdges()))

if 'MOAB_JOBID' in os.environ:
    print("#LOG# program_run/0/job_id: {}".format(os.environ['MOAB_JOBID']))

print("#LOG# algorithm_run/1/program_run_id: 0")
print("#LOG# algorithm_run/1/algorithm: PLM")

setNumberOfThreads(16)
t = stopwatch.Timer()
c = community.detectCommunities(g, inspect=False)
t.stop()

print("#LOG# algorithm_run/1/runtime: {}".format(t.elapsed))

community.writeCommunities(c, sys.argv[2])

print("#LOG# clustering/2/algorithm_run_id: 1")
print("#LOG# clustering/2/source: computation")
print("#LOG# clustering/2/path: {}".format(sys.argv[2]))
Example No. 10
	def create(cls, G, preset="default", config=None):
		""" creates a profile object

		Args:
			G: graph to profile
			preset: name of preset configuration: "complete", "minimal", "default"
			config: object to control some aspects of the generation behaviour (Config)
		Returns:
			profile object
		"""

		# if no custom config is given, use a preconfigured config according to preset name
		if not config:
			config = Config.createConfig(preset)

		result = cls(G, cls.__TOKEN)
		# TODO: use copy constructor instead
		result.__config = config

		kit.setNumberOfThreads(result.__parallel)

		def funcScores(instance):
			""" returns node scores """
			return instance.scores()

		def funcSizes(instance):
			""" returns partition subset sizes """
			return sorted(instance.getPartition().subsetSizes())

		if G.isDirected():
			classConnectedComponents = components.StronglyConnectedComponents
		else:
			classConnectedComponents = components.ConnectedComponents

		# internal unique name | category name | display name |
		# compute correlation within same category | value function for measures | display name (axis) | class name of measure | parameter of constructor
		for parameter in [
			("Centrality.Degree",					"Node Centrality",	"Degree",
				True,	funcScores,	"Score",				centrality.DegreeCentrality, 			(G, )),
			("Centrality.CoreDecomposition",		"Node Centrality",	"k-Core Decomposition",
				True,	funcScores,	"Score",				centrality.CoreDecomposition, 			(G, )),
			("Centrality.ClusteringCoefficient",	"Node Centrality",	"Local Clustering Coefficient",
				True,	funcScores,	"Score",				centrality.LocalClusteringCoefficient,	(G, )),
			("Centrality.PageRank", 				"Node Centrality",	"PageRank",
				True,	funcScores,	"Score",				centrality.PageRank, 					(G, )),
			("Centrality.KPath", 					"Node Centrality",	"k-Path Centrality",
				True,	funcScores,	"Score",				centrality.KPathCentrality,				(G, )),
			("Centrality.Katz",						"Node Centrality",	"Katz Centrality",
				True,	funcScores,	"Score",				centrality.KatzCentrality,				(G, )),
			("Centrality.Betweenness", 				"Node Centrality",	"Betweenness",
				True,	funcScores,	"Score",				centrality.ApproxBetweenness2,			(G, 10, True)),
			("Centrality.Closeness",				"Node Centrality",	"Closeness",
				True,	funcScores,	"Score",				centrality.ApproxCloseness,				(G, 10, True)),
			("Partition.Communities", 				"Partition",		"Communities",
				False,	funcSizes,	"Nodes per Community",	community.PLM,			 				(G, )),
			("Partition.ConnectedComponents", 		"Partition",		"Connected Components",
				False,	funcSizes,	"Nodes per Component",	classConnectedComponents,				(G, )),
			("Partition.CoreDecomposition", 		"Partition",		"k-Core Decomposition",
				False,	funcSizes,	"Nodes per Shell",		centrality.CoreDecomposition, 			(G, ))
		]: result.__addMeasure(parameter)

		if cls.__verbose:
			timerAll = stopwatch.Timer()
		result.__loadProperties()
		result.__loadMeasures()
		if cls.__verbose:
			if cls.__verboseLevel < 1:
				print("")
			print("\ntotal time (measures + stats + correlations): {:.2F} s".format(timerAll.elapsed))
			print("total speed: {:.1F} edges/s".format(G.numberOfEdges() / timerAll.elapsed))
		return result