Example 1
    def setUp(self):
        # toggle the comments to test on small or large test cases
        #self.L = nk.readGraph("PGPgiantcompo.graph", nk.Format.METIS)  # without self-loops
        #self.LL = nk.readGraph("PGPConnectedCompoLoops.gml", nk.Format.GML)  # with self-loops sprinkled in
        self.L = nk.readGraph("input/looptest1.gml",
                              nk.Format.GML)  # without self-loops
        self.LL = nk.readGraph("input/looptest2.gml",
                               nk.Format.GML)  # with self-loops sprinkled in
Example 2
    def test_centrality_groupcloseness_growshrink(self):
        g = nk.readGraph('input/MIT8.edgelist',
                         nk.Format.EdgeList,
                         separator='\t',
                         firstNode=0,
                         continuous=False,
                         directed=False)
        g = nk.components.ConnectedComponents(
            g).extractLargestConnectedComponent(g, True)
        k = 5

        nk.engineering.setSeed(42, False)
        for weighted in [False, True]:
            group = set()
            while len(group) < k:
                group.add(nk.graphtools.randomNode(g))

            gc = nk.centrality.GroupClosenessGrowShrink(g, group).run()

            groupMaxCC = gc.groupMaxCloseness()
            self.assertEqual(len(set(groupMaxCC)), k)
            self.assertGreaterEqual(gc.numberOfIterations(), 0)

            for u in groupMaxCC:
                self.assertTrue(g.hasNode(u))
Example 3
    def test_SpectralColoring(self):
        G = nk.readGraph("input/karate.graph", nk.Format.METIS)
        spCol = nk.coloring.SpectralColoring(G)

        spCol.run()

        self.assertLessEqual(len(spCol.getColoring()), G.upperNodeIdBound())
Example 4
def test(filename, measure, iterations=80, results_file=None):
    g = nk.readGraph(filename, nk.Format.GML)
    g.setName(os.path.basename(filename).split(".", 1)[0])

    if g.isDirected():
        g = g.toUndirected()

    for i in range(iterations):
        print(measure(g), file=results_file)
Example 5
def graphMeta(graphNames, graphDir):
    meta = []
    for name in graphNames:
        info("loading {name}".format(**locals()))
        G = networkit.readGraph(os.path.join(graphDir, "{0}.gml.graph".format(name)), networkit.Format.GML)
        (n, m) = networkit.properties.size(G)
        meta.append({"name" : name, "n" : n, "m" : m})
    info("done")
    return pandas.DataFrame(meta, columns=["name", "n", "m"])
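A hypothetical call (the graph names and directory below are placeholders) returns one DataFrame row per graph:

df = graphMeta(["karate", "jazz"], "graphs")
print(df)  # columns: name, n, m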
Example 6
def test(filename, measure, iterations=1, results_file=None):
    g = nk.readGraph(filename, nk.Format.GML)
    g.setName(os.path.basename(filename).split(".", 1)[0])

    if g.isDirected():
        g = g.toUndirected()

    for i in range(iterations):
        print(measure(nk.nk2nx(g)), file=results_file)
Example 7
def walk(inputDir, outputDir, graphFormat, filePattern="*",  preset="default", config=None, outputType="HTML", style="light", color=colors["green"], recursive=False, parallel=False):
	""" tests all files of a directory for the given conditions and generates a profile when matching

	Parameters:
	-----------
		inputDir: the directory to search
		filePattern: specify accepted file names, e.g.: *.METIS.graph
		outputDir: directory to write the generated profiles
		preset: config preset ("minimal", "default", "full")
		config: object for fine-grained control over profile content (Config) -- overrides preset
		outputType: profile output format ("HTML", "LaTeX")
		style: style of generated output ("light")
		color: mainly used color of given style (RGB values in [0,1])
		recursive: also search in subfolders for matching files
		parallel: run some additional parts of the generation in parallel (experimental)
		graphFormat: format of matching files (e.g.: Format.METIS)
	"""

	# if no custom config is given, use a preconfigured config according to preset name
	if not config:
		config = Config.createConfig(preset)

	if not os.path.isdir(outputDir):
		os.mkdir(outputDir)

	for (dirpath, dirnames, filenames) in os.walk(inputDir):
		for filename in filenames:
			file = dirpath + "/" + filename
			if fnmatch.fnmatch(filename, filePattern):
				Profile.verbosePrint("\n[ " + file + " ]")
				try:
					G = kit.readGraph(file, graphFormat)
					try:
						pf = Profile.create(
							G,
							config = config
						)
						Profile.verbosePrint("");
						pf.output(
							outputType = outputType,
							directory = outputDir,
							style = style,
							color = color,
							parallel = parallel
						)
					except Exception as e:
						Profile.verbosePrint("=> an error occured: {0} of type {1}".format(e, type(e)))
						Profile.verbosePrint(traceback.format_exc())
				except Exception:
					Profile.verbosePrint("could not read {0}".format(file))
				Profile.verbosePrint("\n")
			else:
				Profile.verbosePrint("skipping {0} as it does not match filePattern".format(file))
		if not recursive:
			break
	print("Done")
Example 8
def walk(inputDir, outputDir, graphFormat, filePattern="*",  preset="default", config=None, outputType="HTML", style="light", color=colors["green"], recursive=False, parallel=False):
	""" tests all files of a directory for the given conditions and generates a profile when matching

	Args:
		inputDir: the directory to search
		filePattern: specify accepted file names, e.g.: *.METIS.graph
		outputDir: directory to write the generated profiles
		preset: config preset ("minimal", "default", "full")
		config: object for fine-grained control over profile content (Config) -- overrides preset
		outputType: profile output format ("HTML", "LaTeX")
		style: style of generated output ("light")
		color: mainly used color of given style (RGB values in [0,1])
		recursive: also search in subfolders for matching files
		parallel: run some additional parts of the generation in parallel (experimental)
		graphFormat: format of matching files (e.g.: Format.METIS)
	"""

	# if no custom config is given, use a preconfigured config according to preset name
	if not config:
		config = Config.createConfig(preset)

	if not os.path.isdir(outputDir):
		os.mkdir(outputDir)

	for (dirpath, dirnames, filenames) in os.walk(inputDir):
		for filename in filenames:
			file = dirpath + "/" + filename
			if fnmatch.fnmatch(filename, filePattern):
				Profile.verbosePrint("\n[ " + file + " ]")
				try:
					G = kit.readGraph(file, graphFormat)
					try:
						pf = Profile.create(
							G,
							config = config
						)
						Profile.verbosePrint("");
						pf.output(
							outputType = outputType,
							directory = outputDir,
							style = style,
							color = color,
							parallel = parallel
						)
					except Exception as e:
						Profile.verbosePrint("=> an error occured: {0} of type {1}".format(e, type(e)))
						Profile.verbosePrint(traceback.format_exc())
				except Exception:
					Profile.verbosePrint("could not read {0}".format(file))
				Profile.verbosePrint("\n")
			else:
				Profile.verbosePrint("skipping {0} as it does not match filePattern".format(file))
		if not recursive:
			break
	print("Done")
Example 9
def compute_network_size(path, out):
	import networkit as nk
	try:
		g = nk.readGraph(path, nk.Format.EdgeList,
				separator=' ', firstNode=0, continuous=False, directed=False)
	except Exception:  # raised when attempting to read a non-network file
		return
	data = {
		'n': g.numberOfNodes(),
		'm': g.numberOfEdges()
	}
	yaml.dump(data, out, default_flow_style=False)
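A hypothetical usage (the file name is a placeholder), dumping the node and edge counts of an edge-list file as YAML to stdout:

import sys
compute_network_size("network.edges", sys.stdout)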
Example 10
    def _execute_one_graph(self, graph_dict):
        in_path = (GraphCrawler()._stagepath + graph_dict["Group"] + "/" +
                   graph_dict["Path"])
        out_path = self._stagepath + "results.csv"
        graph_type = graph_dict["Group"]

        g = None
        try:
            g = networkit.readGraph(in_path,
                                    networkit.Format.EdgeList,
                                    separator=" ",
                                    firstNode=0,
                                    commentPrefix="%",
                                    continuous=True)
        except Exception as e:
            print(e)

        if not g:
            print("could not import graph from path", in_path)
            return
        if g.numberOfNodes() > 0 and g.numberOfEdges() > 0:
            if g.degree(0) == 0:
                g.removeNode(0)

        print("Graph", g.toString())
        g = self.shrink_to_giant_component(g)
        if g.numberOfNodes() < 100:
            print("Graph is too small (" + str(g.numberOfNodes()) +
                  " nodes, needs 100): " + in_path)
            return

        model_types = [("real-world", lambda x: ("", x)),
                       ("ER", lambda x: ("", self.fit_er(x))),
                       ("BA circle", lambda x:
                        ("", self.fit_ba(x, fully_connected_start=False))),
                       ("BA full", lambda x:
                        ("", self.fit_ba(x, fully_connected_start=True))),
                       ("chung-lu", lambda x: ("", self.fit_chung_lu(x))),
                       ("hyperbolic", self.fit_hyperbolic)]

        # outputs = []
        # all_keys = set()
        for model_name, model_converter in model_types:
            try:
                info, model = model_converter(g)
                output = self.analyze(model)
            except ZeroDivisionError as e:
                print("Error:", e, "for", model_name, "of", g.getName(), model)
            else:
                output["Graph"] = g.getName()
                output["Type"] = graph_type
                output["Model"] = model_name
                output["Info"] = info
                self._save_as_csv(output)
Example 11
def graphMeta(graphNames,
              graphDir,
              fileEnding=".gml.graph",
              graphFormat=networkit.Format.GML):
    meta = []
    for name in graphNames:
        info("loading {name}".format(**locals()))
        G = networkit.readGraph(
            os.path.join(graphDir, "{0}{1}".format(name, fileEnding)),
            graphFormat)
        (n, m) = G.size()
        meta.append({"name": name, "n": n, "m": m})
    info("done")
    return pandas.DataFrame(meta, columns=["name", "n", "m"])
Example 12
def to_networkit(data):
    """
    Convert the dataset to a `networkit <https://networkit.github.io/>`_ graph.
    
    :param data: :py:class:`gct.Dataset`
    :rtype: networkit graph
    """
    import networkit
    fname = data.file_edges
    if not utils.file_exists(fname):
        data.to_edgelist()
    return networkit.readGraph(fname,
                               fileformat=networkit.Format.EdgeListSpaceZero,
                               directed=data.is_directed())
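A hypothetical usage, where ds stands in for any gct.Dataset instance:

g = to_networkit(ds)
print(g.numberOfNodes(), g.numberOfEdges())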
Example 13
	def test_components_StronglyConnectedComponents(self):
		g = nk.readGraph("input/MIT8.edgelist",
				nk.Format.EdgeList, separator='\t', firstNode=0, continuous=False, directed=True)
		scc = nk.components.StronglyConnectedComponents(g)
		scc.run()
		self.assertNotEqual(scc.componentOfNode(0), None)
		nComponents = scc.numberOfComponents()
		compSizes = scc.getComponentSizes()
		self.assertEqual(nComponents, len(compSizes))

		comps = scc.getComponents()
		for idx, size in compSizes.items():
			self.assertEqual(len(comps[idx]), size)

		_=scc.getPartition()
Example 14
	def testCentralityGroupClosenessLocalSearch(self):
		g = nk.readGraph('input/celegans_metabolic.graph', nk.Format.METIS)
		k = 5

		nk.engineering.setSeed(42, False)
		for weighted in [False, True]:
			group = set()
			while len(group) < k:
				group.add(nk.graphtools.randomNode(g))

			gc = nk.centrality.GroupClosenessLocalSearch(g, group).run()

			groupMaxCC = gc.groupMaxCloseness()
			self.assertEqual(len(set(groupMaxCC)), k)

			for u in groupMaxCC:
				self.assertTrue(g.hasNode(u))
Example 15
	def testSortEdgesByWeight(self):
		def checkSortedEdges(g, decreasing):
			for u in g.iterNodes():
				prevNode = g.numberOfNodes() if decreasing else -1
				prevWeight = 2 if decreasing else 0

				for v in g.iterNeighbors(u):
					w = g.weight(u, v)
					if decreasing:
						if w == prevWeight:
							self.assertLess(prevNode, v)
						else:
							self.assertLess(w, prevWeight)
					else:
						if w == prevWeight:
							self.assertLess(prevNode, v)
						else:
							self.assertGreater(w, prevWeight)

					prevNode, prevWeight = v, w

		def doTest(g):
			nk.graphtools.sortEdgesByWeight(g, False)
			checkSortedEdges(g, False)
			nk.graphtools.sortEdgesByWeight(g, True)
			checkSortedEdges(g, True)

		g = nk.readGraph('input/PGPgiantcompo.graph', nk.Format.METIS)
		g.removeSelfLoops()
		g.removeMultiEdges()

		# Test unweighted
		doTest(g)

		random.seed(1)
		g = self.generateRandomWeights(g)
		e = nk.graphtools.randomEdge(g)

		# Test weighted
		doTest(g)
Example 16
    def setUp(self):
        self.G = nk.readGraph("input/PGPgiantcompo.graph", nk.Format.METIS)
Example 17
    def load(self, file_name, dists_given=False, directed=True):
        """
        Loads a problem from one or two files (two if dists_given is False). The first, <file_name>.tasks,
        gives general info about the problem (number of days, ...), and the second contains the graph edges.
        The format is the following:

        *************** "<file_name>.tasks" ***************

        <Number of days (int)> D
        <Number of shifts (int)> S
        <Number of teams (int)> E
        <base location (int)> n

        <tasks locations on the graph (ints)> n1 n2 ... nj

        <tasks times of team 0 (floats)> t1 t2 ... -1 % -1 = inf
        <tasks times of team 1 (floats)> t1 -1 ... tj
        ...
        <tasks times of team E (floats)> -1 t2 ... -1

        <costs of hiring team 0 on each shift (floats)> c01 c02 ... c0(D*S) c0(D*S + 1)
        % The last one corresponds to hiring team 0 for too many shifts
        <costs of hiring team 1 on each shift (floats)> c11 c12 ... c1(D*S) c1(D*S + 1)
        ...
        <costs of hiring team E on each shift (floats)> cE1 cE2 ... cE(D*S) cE(D*S + 1)

        <costs of postponing task 1 on each shift (floats)> c11 c12 ... c1(D*S) c1(D*S + 1)
        % The last one corresponds to postponing task 1 for too many shifts
        <costs of postponing task 2 on each shift (floats)> c21 c22 ... c2(D*S) c2(D*S + 1)
        ...
        <costs of postponing task T on each shift (floats)> cT1 cT2 ... cT(D*S) cT(D*S + 1)
        % Here T = number of tasks (without the base)

        % OPTIONAL
        <distance from 0 to the others (floats)> d00 d01 ... d0j
        <distance from 1 to the others (floats)> d10 d11 ... d1j
        ...
        <distance from j to the others (floats)> dj0 dj1 ... djj


        % If the distances are given, the graph file is not loaded
        *************** "<file_name>.graph" ***************

        <edge1 (ints)> n1 n2 w1
        <edge2 (ints)> n1 n3 w2
        ...
        <edgei (ints)> nj nk wi

        Args:
            file_name (str): the name of the file (without the extension)
            dists_given (bool): True if the distances are given in "<file_name>.tasks"
            directed (bool, default=True): True if the given graph is directed
        """

        with open(file_name + '.tasks', 'r') as tasks_file:

            self.days = int(tasks_file.readline()[:-1])
            self.shifts = int(tasks_file.readline()[:-1])
            self.teams = int(tasks_file.readline()[:-1])
            self.tasks_loc = [int(tasks_file.readline()[:-1])] + list(map(int, tasks_file.readline()[:-1].split(' ')))

            self.tasks_loc = np.array(self.tasks_loc)

            self.tasks_times = np.zeros((self.teams, self.tasks_loc.shape[0]))
            for i in range(self.teams):
                line = tasks_file.readline()
                times = [0.] + list(map(float, line[:-1].split(' ')))
                self.tasks_times[i] = np.array(times)
                self.tasks_times[i, self.tasks_times[i] < 0] = np.inf

            self.teams_costs = np.zeros((self.teams, self.days * self.shifts + 1))
            for i in range(self.teams):
                line = tasks_file.readline()
                costs = list(map(float, line[:-1].split(' ')))
                self.teams_costs[i] = np.array(costs)

            self.tasks_costs = np.zeros((self.tasks_loc.shape[0] - 1, self.days * self.shifts + 1))
            for i in range(self.tasks_loc.shape[0] - 1):
                line = tasks_file.readline()
                costs = list(map(float, line[:-1].split(' ')))
                self.tasks_costs[i] = np.array(costs)

            if dists_given:
                # square distance matrix over all locations (base + tasks)
                nLoc = self.tasks_loc.shape[0]
                self.tasks_dists = np.empty((nLoc, nLoc))
                for i, line in enumerate(tasks_file.readlines()):
                    self.tasks_dists[i] = np.array(list(map(float, line[:-1].split(' '))))

        if not dists_given and isfile(file_name + '.graph'):
            graph = nk.readGraph(file_name + '.graph', nk.Format.EdgeList, separator=' ', firstNode=0, directed=directed)
            self.compute_dists(graph)
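To make the <file_name>.graph edge-list format concrete, here is a made-up toy instance and its load call, mirroring the readGraph parameters used in load() above:

# Hypothetical: one weighted edge "n1 n2 w" per line, nodes numbered from 0.
with open('toy.graph', 'w') as f:
    f.write('0 1 3\n0 2 5\n1 2 1\n')
graph = nk.readGraph('toy.graph', nk.Format.EdgeList, separator=' ',
                     firstNode=0, directed=True)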
Example 18
                                   label=label,
                                   c=next(color),
                                   alpha=0.6,
                                   linewidth=2.0)

                if debug:
                    print("{} {}".format(strategy,
                                         r_index))  #, file=file_results)

                lgd = analysis_plot.legend(loc="center left",
                                           shadow=False,
                                           bbox_to_anchor=(1.0, 0.5))
            index += 1

    pylab.tight_layout(pad=4.3, w_pad=5.4, h_pad=2.5)
    pylab.savefig(file_name + ".png", format="png", bbox_extra_artists=(lgd, ))
    pylab.close(1)

    if debug:
        file_results.close()


if __name__ == "__main__":
    # def test():
    graph = nk.readGraph("football.gml", nk.Format.GML)

    # erg = nk.generators.ErdosRenyiGenerator(2, 0.3, False)
    # graph = erg.generate()

    plot_robustness_analysis(graph)
Example 19
def load_from_graphtool_nk(path):
    graph = nk.readGraph(path, nk.Format.GraphToolBinary)
    return graph
Example 20
def _execute_one_graph(graph_dict):
    in_path = (GraphCrawler()._stagepath + graph_dict["Group"] + "/" +
               graph_dict["Path"])
    graph_type = graph_dict["Group"]

    g = None
    try:
        g = networkit.readGraph(in_path,
                                networkit.Format.EdgeList,
                                separator=" ",
                                firstNode=0,
                                commentPrefix="%",
                                continuous=True)
    except Exception as e:
        print(e)
        return []

    if not g:
        print("could not import graph from path", in_path)
        return []
    if g.numberOfNodes() > 0 and g.numberOfEdges() > 0:
        if g.degree(0) == 0:
            g.removeNode(0)

    print("Graph", g.toString())
    g = shrink_to_giant_component(g)
    if g.numberOfNodes() < 100:
        print("Graph is too small (" + str(g.numberOfNodes()) +
              " nodes, needs 100): " + in_path)
        return []

    model_types = [("ER", lambda x: ("", fit_er(x))),
                   ("BA circle", lambda x:
                    ("", fit_ba(x, fully_connected_start=False))),
                   ("BA full", lambda x:
                    ("", fit_ba(x, fully_connected_start=True))),
                   ("chung-lu", lambda x: ("", fit_chung_lu(x))),
                   ("chung-lu constant", fit_chung_lu_constant),
                   ("hyperbolic", fit_hyperbolic)]

    outputs = []
    real_output = analyze(g)
    real_output["Graph"] = g.getName()
    real_output["Type"] = graph_type
    real_output["Model"] = "real-world"
    real_output["Info"] = ""
    outputs.append(real_output)

    # all_keys = set()
    for model_name, model_converter in model_types:
        try:
            info1, model1 = model_converter(g)
            output1 = analyze(model1)
            # Fit the model a second time, using the first fitted model as input
            info2, model2 = model_converter(model1)
            output2 = analyze(model2)
        except ZeroDivisionError as e:
            print("Error:", e, "for", model_name, "of", g.getName())
        else:
            output1["Graph"] = g.getName()
            output1["Type"] = graph_type
            output1["Model"] = model_name
            output1["Info"] = info1
            outputs.append(output1)

            output2["Graph"] = g.getName()
            output2["Type"] = graph_type
            output2["Model"] = model_name + "-second"
            output2["Info"] = info2
            outputs.append(output2)
            # all_keys |= set(output.keys())

    # for model_name, info, output in sorted(outputs):
    # for key in all_keys - set(output.keys()):
    #    output[key] = float("nan")
    return outputs
Example 21
	def loadGraph(self, path, graphFormat=networkit.Format.GML):
		with Timer() as t:
			G = networkit.readGraph(path, graphFormat)
		debug("reading {path} took {t.elapsed} s".format(**locals()))
		return G
Example 22
#0. Parse arguments
fileFormat = sys.argv[1]
file = sys.argv[2]
t = int(sys.argv[3])
curingRate = int(sys.argv[4])
initialFractionOfInfected = float(sys.argv[5])
saveFolder = sys.argv[6]

#1. Load the network and calculate the epidemic threshold
#Currently, the script only supports METIS and SNAP formats
if fileFormat == 'METIS':
    fileFormat = nt.Format.METIS
else:
    fileFormat = nt.Format.SNAP
G = nt.readGraph(file, fileFormat)
epThreshold = 1/(nt.algebraic.adjacencyEigenvector(G, 0)[0])
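#Hypothetical cross-check: epThreshold should equal 1/lambda_max, the reciprocal
#of the largest adjacency eigenvalue (assumes nt.algebraic.adjacencyMatrix
#returns a scipy sparse matrix)
import scipy.sparse.linalg
A = nt.algebraic.adjacencyMatrix(G)
lambdaMax = scipy.sparse.linalg.eigsh(A.asfptype(), k=1, which='LA',
                                      return_eigenvectors=False)[0]
assert abs(1/lambdaMax - epThreshold) < 1e-6 * epThreshold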


#2. Get network properties
#Number of nodes, Max deg., avg. deg, clustering.coeff, modularity, power law gamma
N = G.numberOfNodes()
dmax = nt.properties.GraphProperties.minMaxDegree(G)[1]
k = nt.properties.GraphProperties.averageDegree(G)
ck = nt.properties.GraphProperties.averageLocalClusteringCoefficient(G)
zeta = nt.community.detectCommunities(G)
mod = nt.community.Modularity().getQuality(zeta, G)
isPowerLaw, gamma = nt.properties.degreePowerLaw(G), 0
if isPowerLaw[0]:
    gamma = isPowerLaw[2]
Example 23
    pylab.close(1)

    # Generate csv file
    import numpy as np
    matrix = np.matrix([x1, y1, y2, y3, y5])

    filename = outfile.rsplit(".", 1)[0] + ".csv"
    header = ", degree, betweeness, closeness, random"
    separator = ", "

    np.savetxt(filename, matrix.transpose(), fmt="%s", delimiter=separator,
               header=header, comments="")


if __name__ == "__main__":
    infile = sys.argv[1]
    outfile = sys.argv[2]

    if sys.argv[3] == "True":
        recalculate = True
    else:
        recalculate = False

    import networkit as nk

    gk = nk.readGraph(infile, nk.Format.GML)
    g = nk.nk2nx(gk)
    # g = nx.read_gml(infile)

    plot_functions(g, outfile, recalculate)
Example 24
	def setUp(self):
		self.g = nk.readGraph("input/PGPgiantcompo.graph", nk.Format.METIS)
		self.gw = self.generateRandomWeights(self.g)
Example 25
def generateEgoFB():
    return nk.readGraph('./graphs/facebook_egos', nk.Format.EdgeList, firstNode=0, separator=" ", continuous=True)
Example 26
	def loadGraph(self, path):
		with Timer() as t:
			G = networkit.readGraph(path, networkit.Format.GML)
		debug("reading {path} took {t.elapsed} s".format(**locals()))
		return G
Example 27
            print(method_name)  #, file=file_results)

            for strategy in centrality.keys():
                vertices_removed, component_size, r_index = calculate(nk.Graph(g), strategy, name, sequential_analysis)
                label = "%s \n($R = %4.3f$)" % (strategy, r_index)
                analysis_plot.plot(vertices_removed, component_size, label=label, c=next(color), alpha=0.6, linewidth=2.0)

                if debug:
                    print("{} {}".format(strategy, r_index))#, file=file_results)

                lgd = analysis_plot.legend(loc="center left", shadow=False, bbox_to_anchor=(1.0, 0.5))
            index += 1

    pylab.tight_layout(pad=4.3, w_pad=5.4, h_pad=2.5)
    pylab.savefig(file_name + ".png", format="png", bbox_extra_artists=(lgd,))
    pylab.close(1)

    if debug:
        file_results.close()


if __name__ == "__main__":
# def test():
    graph = nk.readGraph("football.gml", nk.Format.GML)

    # erg = nk.generators.ErdosRenyiGenerator(2, 0.3, False)
    # graph = erg.generate()

    plot_robustness_analysis(graph)