Example #1
    def fit_hyperbolic(self, g):
        networkit.setSeed(seed=42, useThreadId=False)
        degrees = networkit.centrality.DegreeCentrality(g).run().scores()
        with PrintBlocker():
            fit = powerlaw.Fit(degrees, fit_method='Likelihood')
        gamma = max(fit.alpha, 2.1)
        n, m = g.size()
        degree_counts = collections.Counter(degrees)
        n_hyper = n + max(0, 2 * degree_counts[1] - degree_counts[2])
        k = 2 * m / (n_hyper - 1)

        def criterium(h):
            with PrintBlocker():
                return networkit.globals.clustering(h)

        goal = criterium(g)

        def guess_goal(t):
            hyper_t = networkit.generators.HyperbolicGenerator(
                n_hyper, k, gamma, t).generate()
            hyper_t = self.shrink_to_giant_component(hyper_t)
            return criterium(hyper_t)

        t, crit_diff = self.binary_search(guess_goal, goal, 0, 0.99)
        hyper = networkit.generators.HyperbolicGenerator(n_hyper, k, gamma,
                                                         t).generate()
        info_map = [("n", n_hyper), ("k", k), ("gamma", gamma), ("t", t)]
        info = "|".join([name + "=" + str(val) for name, val in info_map])
        return (info, hyper)
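The PrintBlocker used in Example #1 (and again in Example #13) is a project helper that is not shown in these snippets. A minimal sketch, assuming all it has to do is silence what powerlaw.Fit and the networkit calls print to stdout:

import os
import sys


class PrintBlocker:
    # Hypothetical stand-in for the PrintBlocker helper used above: a context
    # manager that temporarily redirects stdout to os.devnull.

    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = open(os.devnull, "w")
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        sys.stdout.close()
        sys.stdout = self._stdout
        return False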
Example #2
	def testMerge(self):
		n1, n2 = 100, 150
		p1, p2 = 0.01, 0.05

		def testGraphs (Gorig, Gmerge, G1):
			for u in range(max(Gorig.upperNodeIdBound(), G1.upperNodeIdBound())):
				self.assertEqual(Gmerge.hasNode(u), Gorig.hasNode(u) or G1.hasNode(u))

			Gorig.forEdges(lambda u, v, w, eid: self.assertTrue(Gmerge.hasEdge(u, v)))
			G1.forEdges(lambda u, v, w, eid: self.assertTrue(Gmerge.hasEdge(u, v)))

			def checkEdges(u, v, w, eid):
				if Gorig.hasNode(u) and Gorig.hasNode(v) and Gorig.hasEdge(u, v):
					self.assertEqual(Gorig.weight(u, v), w)
				else:
					self.assertEqual(G1.weight(u, v), w)
			Gmerge.forEdges(checkEdges)

		for seed in range(1, 4):
			nk.setSeed(seed, False)
			random.seed(seed)
			for directed in [True, False]:
				for weighted in [True, False]:
					Gorig = nk.generators.ErdosRenyiGenerator(n1, p1, directed).generate()
					G1 = nk.generators.ErdosRenyiGenerator(n2, p2, directed).generate()
					if weighted:
						Gorig = self.generateRandomWeights(Gorig)
						G1 = self.generateRandomWeights(G1)
					Gmerge = copy(Gorig)
					nk.graphtools.merge(Gmerge, G1)
					testGraphs(Gorig, Gmerge, G1)
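generateRandomWeights is a helper of the test class that does not appear in these snippets. A hypothetical version, written here as a free function, that matches how the tests use it (make the graph weighted and give every edge a random positive weight):

import random

import networkit as nk


def generateRandomWeights(G):
    # Hypothetical stand-in for the test helper used above: return a weighted
    # copy of G in which every edge carries a random positive weight.
    GWeighted = nk.graphtools.toWeighted(G)
    for u, v in GWeighted.iterEdges():
        GWeighted.setWeight(u, v, random.uniform(0.1, 1.0))
    return GWeighted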
Example #3
    def testToUnWeighted(self):
        n = 200
        p = 0.2

        def testGraphs(G, G1):
            self.assertEqual(G.numberOfNodes(), G1.numberOfNodes())
            self.assertEqual(G.upperNodeIdBound(), G1.upperNodeIdBound())
            self.assertEqual(G.numberOfEdges(), G1.numberOfEdges())
            self.assertNotEqual(G.isWeighted(), G1.isWeighted())
            self.assertEqual(G.isDirected(), G1.isDirected())
            self.assertEqual(G.hasEdgeIds(), G1.hasEdgeIds())

            def checkEdges(u, v, w, eid):
                self.assertTrue(G1.hasEdge(u, v))
                if G1.isWeighted():
                    self.assertEqual(G1.weight(u, v), 1.0)

            G.forEdges(checkEdges)

        for seed in range(1, 4):
            nk.setSeed(seed, False)
            random.seed(seed)
            for directed in [True, False]:
                G = nk.generators.ErdosRenyiGenerator(n, p,
                                                      directed).generate()

                G1 = nk.graphtools.toWeighted(G)
                testGraphs(G, G1)

                G = self.generateRandomWeights(G)

                G1 = nk.graphtools.toUnweighted(G)
                testGraphs(G, G1)
Example #4
def fit_ba(g, fully_connected_start):
    random.seed(42, version=2)
    networkit.setSeed(seed=42, useThreadId=False)
    n, m = g.size()
    m_0 = math.ceil(m / n)
    ba = networkit.Graph(n)
    nodes = ba.nodes()
    edges_added = 0
    if fully_connected_start:
        start_connections = itertools.combinations(nodes[:m_0], 2)
    else:  # circle
        start_connections = (
            [(nodes[m_0-1], nodes[0])] +
            [(nodes[i], nodes[i+1]) for i in range(m_0-1)]
        )
    for u, v in start_connections:
        ba.addEdge(u, v)
        edges_added += 1

    # Preferential attachment for the remaining nodes: draw a random edge of
    # the partially built graph and keep one of its endpoints, which favours
    # high-degree nodes (see the check after this example).
    for i, v in list(enumerate(nodes))[m_0:]:
        num_new_edges = min(i, int((m-edges_added)/(n-i)))
        to_connect = set()
        while len(to_connect) < num_new_edges:
            num_draws = num_new_edges - len(to_connect)
            to_connect_draws = [
                random.choice(ba.randomEdge())
                for _ in range(num_draws)
            ]
            to_connect |= set(
                u for u in to_connect_draws if not ba.hasEdge(v, u)
            )
        for u in to_connect:
            ba.addEdge(u, v)
        edges_added += num_new_edges
    return ba
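The only non-obvious step in fit_ba is how it draws attachment targets: it picks a random edge of the partially built graph and keeps one of its endpoints. If the edge is drawn uniformly, a node of degree d is chosen with probability d/(2m), which is exactly the preferential-attachment weight of the Barabási-Albert model. A tiny self-contained check of that fact (not part of the example):

import collections
import random

# Toy graph given as an explicit edge list; degrees are 3, 2, 2, 1.
edges = [(0, 1), (0, 2), (0, 3), (1, 2)]
counts = collections.Counter(
    random.choice(random.choice(edges)) for _ in range(100000)
)
total = sum(counts.values())
for node in sorted(counts):
    # Empirical frequencies approach deg(node) / (2 * m) = 3/8, 2/8, 2/8, 1/8.
    print(node, counts[node] / total)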
Example #5
def fit_chung_lu_constant(g):
    random.seed(42, version=2)
    networkit.setSeed(seed=42, useThreadId=False)
    degrees = networkit.centrality.DegreeCentrality(g).run().scores()
    alpha = powerlaw_fit(degrees)
    
    k = 2 * g.numberOfEdges() / g.numberOfNodes()
    
    generator = networkit.generators.PowerlawDegreeSequence(g)

    # Use the same gamma as the other algorithms 
    gamma = max(alpha, 2.1)
    generator.setGamma(-gamma)
    generator.run()
    generator.setMinimumFromAverageDegree(max(generator.getExpectedAverageDegree(), k))
    
    degree_sequence = generator.run().getDegreeSequence(g.numberOfNodes())
    graph = networkit.generators.ChungLuGenerator(degree_sequence).generate()
    make_connected(graph)
    
    info_map = [
        ("n", g.numberOfNodes()),
        ("gamma", gamma),
        ("k", k)
    ]
    
    info = "|".join([name + "=" + str(val) for name, val in info_map])

    return (info, graph)
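make_connected, called here and again in Examples #7 and #26, is another helper that is not shown. A minimal sketch, assuming all it needs to do is join the connected components of the generated graph with a few extra edges:

import networkit


def make_connected(graph):
    # Hypothetical sketch: attach every smaller connected component to the
    # largest one with a single edge so the generated graph is connected.
    cc = networkit.components.ConnectedComponents(graph)
    cc.run()
    parts = sorted(cc.getComponents(), key=len, reverse=True)
    for part in parts[1:]:
        graph.addEdge(parts[0][0], part[0])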
Example #6
    def testDijkstraFrom(self):
        n = 100
        p = 0.15
        randNodes = [i for i in range(n)]

        for weighted in [False, True]:
            for directed in [False, True]:
                for seed in range(4):
                    nk.setSeed(seed, False)
                    random.seed(seed)
                    random.shuffle(randNodes)
                    G = nk.generators.ErdosRenyiGenerator(n, p,
                                                          directed).generate()
                    if weighted:
                        G = self.generateRandomWeights(G)

                    explored = set()

                    def exploreNode(u, d):
                        self.assertFalse(u in explored)
                        self.assertGreaterEqual(d, 0)
                        explored.add(u)

                    def testSingleSource(source):
                        explored.clear()
                        nk.graph.Traversal.DijkstraFrom(G, source, exploreNode)

                    G.forNodes(testSingleSource)

                    for nSources in range(n):
                        explored.clear()
                        sources = randNodes[:nSources]
                        nk.graph.Traversal.DijkstraFrom(
                            G, sources, exploreNode)
Example #7
def fit_hyperbolic(g):
    random.seed(42, version=2)
    networkit.setSeed(seed=42, useThreadId=False)
    degrees = networkit.centrality.DegreeCentrality(g).run().scores()
    alpha = powerlaw_fit(degrees)
    gamma = max(alpha, 2.1)
    n, m = g.size()
    degree_counts = collections.Counter(degrees)
    n_hyper = n + max(0, 2*degree_counts[1] - degree_counts[2])
    k = 2 * m / (n_hyper-1)
    def criterium(h):
        #networkit.setLogLevel("WARN")
        val = networkit.globals.clustering(h)
        #networkit.setLogLevel("INFO")
        return val
    goal = criterium(g)

    def guess_goal(t):
        hyper_t = networkit.generators.HyperbolicGenerator(
            n_hyper, k, gamma, t).generate()
        make_connected(hyper_t)
        hyper_t = shrink_to_giant_component(hyper_t)
        return criterium(hyper_t)
    t, crit_diff = binary_search(guess_goal, goal, 0.01, 0.99)
    hyper = networkit.generators.HyperbolicGenerator(
        n_hyper, k, gamma, t).generate()
    make_connected(hyper)
    info_map = [
        ("n", n_hyper),
        ("k", k),
        ("gamma", gamma),
        ("t", t)
    ]
    info = "|".join([name + "=" + str(val) for name, val in info_map])
    return (info, hyper)
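binary_search (also used as self.binary_search in Example #1) is assumed here to be a simple bisection over the temperature t: the clustering coefficient of the generated hyperbolic graph drops as t grows, so the interval can be narrowed until the generated clustering matches the clustering of g. A sketch under that monotonicity assumption:

def binary_search(f, goal, lower, upper, iterations=20):
    # Hypothetical sketch of the helper used above: bisect [lower, upper]
    # assuming f is (roughly) monotonically decreasing, and return the final
    # midpoint together with how far f(mid) still is from goal.
    for _ in range(iterations):
        mid = (lower + upper) / 2
        if f(mid) > goal:
            lower = mid   # too much clustering: raise the temperature
        else:
            upper = mid   # too little clustering: lower the temperature
    mid = (lower + upper) / 2
    return mid, abs(f(mid) - goal)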
Example #8
    def testMaxDegree(self):
        n = 100
        p = 0.2
        edgeUpdates = 10

        def computeMaxDeg(G, weighted=False, inDegree=False):
            nodes = []
            G.forNodes(lambda u: nodes.append(u))
            maxDeg = 0

            def getDegree(u):
                if weighted:
                    return G.weightedDegreeIn(
                        u) if inDegree else G.weightedDegree(u)
                return G.degreeIn(u) if inDegree else G.degreeOut(u)

            for u in nodes:
                maxDeg = max(maxDeg, getDegree(u))

            return maxDeg

        def doTest(G):
            self.assertEqual(nk.graphtools.maxDegree(G),
                             computeMaxDeg(G, False))
            self.assertEqual(nk.graphtools.maxInDegree(G),
                             computeMaxDeg(G, False, True))
            self.assertEqual(nk.graphtools.maxWeightedDegree(G),
                             computeMaxDeg(G, True))
            self.assertEqual(nk.graphtools.maxWeightedInDegree(G),
                             computeMaxDeg(G, True, True))

        for seed in range(1, 4):
            nk.setSeed(seed, False)
            for directed in [True, False]:
                for weighted in [True, False]:
                    G = nk.generators.ErdosRenyiGenerator(n, p,
                                                          directed).generate()
                    if weighted:
                        G = nk.graphtools.toWeighted(G)

                    doTest(G)
                    for _ in range(edgeUpdates):
                        e = nk.graphtools.randomEdge(G)
                        G.removeEdge(e[0], e[1])
                        doTest(G)

                    for _ in range(edgeUpdates):
                        e = nk.graphtools.randomNode(
                            G), nk.graphtools.randomNode(G)
                        while G.hasEdge(e[0], e[1]):
                            e = nk.graphtools.randomNode(
                                G), nk.graphtools.randomNode(G)
                        G.addEdge(e[0], e[1])
                        doTest(G)
Example #9
    def testCentralityApproxSpanningEdge(self):
        nk.setSeed(42, False)
        g = nk.generators.ErdosRenyiGenerator(300, 0.1, False).generate()
        g.indexEdges()
        eps = 0.1

        apx = nk.centrality.ApproxSpanningEdge(g, eps)
        apx.run()
        se = nk.centrality.SpanningEdgeCentrality(g, eps)
        se.runParallelApproximation()
        for apxScore, exactScore in zip(apx.scores(), se.scores()):
            self.assertLessEqual(abs(apxScore - exactScore), 2 * eps)
Example #10
    def testAppend(self):
        n1, n2 = 100, 50
        p1, p2 = 0.01, 0.05
        nodesToDelete = 20

        def testGraphs(G, G1, G2):
            self.assertEqual(G.numberOfNodes(),
                             G1.numberOfNodes() + G2.numberOfNodes())
            self.assertEqual(G.numberOfEdges(),
                             G1.numberOfEdges() + G2.numberOfEdges())
            self.assertEqual(G.isDirected(), G1.isDirected())
            self.assertEqual(G.isDirected(), G2.isDirected())
            self.assertEqual(G.isWeighted(), G1.isWeighted())
            self.assertEqual(G.isWeighted(), G2.isWeighted())

            nodeMap = {}
            v = G1.upperNodeIdBound()
            for u in range(G2.upperNodeIdBound()):
                if G2.hasNode(u):
                    nodeMap[u] = v
                    v += 1

            G1.forNodes(lambda u: self.assertTrue(G.hasNode(u)))
            G1.forEdges(lambda u, v, w, eid: self.assertTrue(G.hasEdge(u, v)))
            G2.forNodes(lambda u: self.assertTrue(G.hasNode(nodeMap[u])))
            G2.forEdges(lambda u, v, w, eid: self.assertTrue(
                G.hasEdge(nodeMap[u], nodeMap[v])))

        for seed in range(1, 4):
            nk.setSeed(seed, False)
            random.seed(seed)
            for directed in [True, False]:
                for weighted in [True, False]:
                    G1 = nk.generators.ErdosRenyiGenerator(
                        n1, p1, directed).generate()
                    G2 = nk.generators.ErdosRenyiGenerator(
                        n2, p2, directed).generate()
                    if weighted:
                        G1 = self.generateRandomWeights(G1)
                        G2 = self.generateRandomWeights(G2)

                    G = copy(G1)
                    nk.graphtools.append(G, G2)
                    testGraphs(G, G1, G2)

                    for _ in range(nodesToDelete):
                        G1.removeNode(nk.graphtools.randomNode(G1))
                        G2.removeNode(nk.graphtools.randomNode(G2))
                        G3 = copy(G1)
                        nk.graphtools.append(G3, G2)
                        testGraphs(G3, G1, G2)
Example #11
    def testBFSfrom(self):
        n = 200
        p = 0.15

        def doBFS(G, sources):
            visited = [False for _ in range(n)]
            sequence = []
            edgeSequence = []
            queue = []

            for source in sources:
                queue.append(source)
                visited[source] = True

            while len(queue) > 0:
                u = queue.pop(0)
                sequence.append(u)
                for v in G.neighbors(u):
                    if visited[v] == False:
                        queue.append(v)
                        visited[v] = True
                        edgeSequence.append((u, v))

            return sequence, edgeSequence

        randNodes = [x for x in range(n)]

        for seed in range(1, 4):
            nk.setSeed(seed, False)
            random.seed(seed)
            random.shuffle(randNodes)
            for directed in [False, True]:
                G = nk.generators.ErdosRenyiGenerator(n, p,
                                                      directed).generate()
                for i in range(1, n + 1):
                    sources = randNodes[:i]
                    sequence, _ = doBFS(G, sources)

                    result = []
                    nk.graph.Traversal.BFSfrom(G, sources,
                                               lambda u, d: result.append(u))
                    self.assertListEqual(sequence, result)

                    source = randNodes[i - 1]
                    _, edgeSequence = doBFS(G, [source])

                    result = []
                    nk.graph.Traversal.BFSEdgesFrom(
                        G, source, lambda u, v, w, eid: result.append((u, v)))
                    self.assertListEqual(edgeSequence, result)
Example #12
def fit_er(g):
    random.seed(42, version=2)
    networkit.setSeed(seed=42, useThreadId=False)
    n, m = g.size()

    # Choose p so that the expected number of ER edges plus the n-1 edges of
    # the random spanning tree added below roughly matches the m edges of g.
    p = ((2*m)/(n-1)-2)/(n-2)

    graph = networkit.generators.ErdosRenyiGenerator(n, p).generate()

    tree_edges = random_tree(n)
    for u, v in tree_edges:
        if not graph.hasEdge(graph.nodes()[u], graph.nodes()[v]):
            graph.addEdge(graph.nodes()[u], graph.nodes()[v])

    return graph
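random_tree is not defined in these snippets; its role is clearly to superimpose a spanning tree so that the result is connected. A hypothetical version (a random recursive tree rather than, say, a uniform spanning tree) that returns n-1 edges over the indices 0..n-1, as fit_er expects:

import random


def random_tree(n):
    # Hypothetical sketch: attach every node i > 0 to a uniformly chosen
    # earlier node, yielding the n-1 edges of a random tree on 0..n-1.
    return [(random.randrange(i), i) for i in range(1, n)]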
Example #13
    def analyze(self, g):
        # networkit.engineering.setNumberOfThreads(1)
        originally_weighted = g.isWeighted()
        if originally_weighted:
            g = g.toUnweighted()
        g.removeSelfLoops()
        g = self.shrink_to_giant_component(g)
        degrees = networkit.centrality.DegreeCentrality(g).run().scores()
        with PrintBlocker():
            fit = powerlaw.Fit(degrees, fit_method='Likelihood')
        stats = {
            "Originally Weighted": originally_weighted,
            "Degree Distribution": {
                "Powerlaw": {
                    "Alpha": fit.alpha,
                    "KS Distance": fit.power_law.KS()
                }
            }
        }

        #############

        # possible profiles: minimal, default, complete
        networkit.profiling.Profile.setParallel(1)
        networkit.setSeed(seed=42, useThreadId=False)
        pf = networkit.profiling.Profile.create(g, preset="complete")

        for statname in pf._Profile__measures.keys():
            stats[statname] = pf.getStat(statname)

        stats.update(pf._Profile__properties)

        keys = [[key] for key in stats.keys()]
        output = dict()
        while keys:
            key = keys.pop()
            val = self.getDeepValue(stats, key)
            if isinstance(val, dict):
                keys += [key + [subkey] for subkey in val]
            elif isinstance(val, int) or isinstance(val, float):
                output[".".join(key)] = val
            elif key == ['Diameter Range']:
                output['Diameter Min'] = val[0]
                output['Diameter Max'] = val[1]

        return output
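getDeepValue is a small helper of the class that is not shown; the loop above uses it to flatten the nested stats dictionary into dotted keys such as "Degree Distribution.Powerlaw.Alpha". A minimal sketch of what it presumably does, written as a free function:

def getDeepValue(nested, key_path):
    # Hypothetical sketch: follow a list of keys into a nested dict and return
    # whatever sits at the end (a sub-dict, a number, or the diameter range).
    value = nested
    for key in key_path:
        value = value[key]
    return value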
Example #14
    def testGraphTranspose(self):
        for seed in range(1, 4):
            nk.setSeed(seed, True)
            random.seed(seed)
            G = nk.generators.ErdosRenyiGenerator(100, 0.2, True).generate()

            for _ in range(20):
                u = nk.graphtools.randomNode(G)
                if not G.hasEdge(u, u):
                    G.addEdge(u, u)

            # Delete a few nodes
            for _ in range(10):
                G.removeNode(nk.graphtools.randomNode(G))
            self.assertGreater(G.numberOfSelfLoops(), 0)

            # Assign random weights
            GWeighted = self.generateRandomWeights(G)

            GWeighted.indexEdges()
            GTrans = nk.graphtools.transpose(GWeighted)

            def checkGWeightedEdges(u, v, w, eid):
                self.assertEqual(GWeighted.edgeId(u, v), GTrans.edgeId(v, u))
                self.assertEqual(GWeighted.weight(u, v), GTrans.weight(v, u))

            GWeighted.forEdges(checkGWeightedEdges)

            def checkGTransEdges(v, u, w, eid):
                self.assertEqual(GWeighted.edgeId(u, v), GTrans.edgeId(v, u))
                self.assertEqual(GWeighted.weight(u, v), GTrans.weight(v, u))

            GTrans.forEdges(checkGTransEdges)

            for u in range(GWeighted.upperNodeIdBound()):
                self.assertEqual(GWeighted.hasNode(u), GTrans.hasNode(u))

            self.assertEqual(GWeighted.numberOfNodes(), GTrans.numberOfNodes())
            self.assertEqual(GWeighted.upperNodeIdBound(),
                             GTrans.upperNodeIdBound())
            self.assertEqual(GWeighted.numberOfEdges(), GTrans.numberOfEdges())
            self.assertEqual(GWeighted.upperEdgeIdBound(),
                             GTrans.upperEdgeIdBound())
            self.assertEqual(GWeighted.numberOfSelfLoops(),
                             GTrans.numberOfSelfLoops())
Example #15
    def testNodeIterator(self):
        nk.setSeed(42, False)

        g = self.getSmallGraph()

        def doTest(g):
            nodes = []
            g.forNodes(lambda u: nodes.append(u))

            i = 0
            for u in g.iterNodes():
                self.assertEqual(u, nodes[i])
                i += 1

        doTest(g)
        g.removeNode(nk.graphtools.randomNode(g))
        g.removeNode(nk.graphtools.randomNode(g))
        doTest(g)
Example #16
def ascending_recursive_PLM_orderings(g, amount_orderings):
    '''
    Takes a graph and a number of orderings amount_orderings. Returns a list of amount_orderings
    orderings based on recursive application of PLM, with the leaves reordered by a
    connectivity-based pseudorandom DFS.
    '''

    if config.TIME_STAMPS >= config.TimeStamps.ALL:
        i = 0
        before = pd.Timestamp.now()

    nk.setSeed(config.SEED, False)
    root = contraction_trees.recursive_PLM(g)
    orderings = ascending_connected_random_orderings(g, root, amount_orderings)

    if config.TIME_STAMPS >= config.TimeStamps.ALL:
        after = pd.Timestamp.now()
        print("Total time: {:f}s".format((after - before).total_seconds()))

    return orderings
Example #17
def ascending_accumulated_contraction_orderings(g, amount_orderings):
    '''
    Takes a graph and a number of orderings amount_orderings. Returns a list of amount_orderings
    orderings, half of them PLM orderings and half affinity orderings, with the leaves reordered
    by a connectivity-based pseudorandom DFS.
    '''

    if config.TIME_STAMPS >= config.TimeStamps.ALL:
        i = 0
        before = pd.Timestamp.now()

    nk.setSeed(config.SEED, False)

    orderings = ascending_affinity_orderings(
        g, amount_orderings - (amount_orderings // 2))
    orderings += ascending_recursive_PLM_orderings(g, amount_orderings // 2)

    if config.TIME_STAMPS >= config.TimeStamps.ALL:
        after = pd.Timestamp.now()
        print("Total time: {:f}s".format((after - before).total_seconds()))

    return orderings
Example #18
	def testRandomEdgesReproducibility(self):
		numSamples = 10
		numSeeds = 3
		numRepeats = 10

		for directed in [False, True]:
			G = self.getSmallGraph(False, directed)

			results = [[] for i in range(numSeeds)]
			for repeats in range(numRepeats):
				for seed in range(numSeeds):
					nk.setSeed(seed, False)
					results[seed].append(nk.graphtools.randomEdges(G, numSamples))

			# assert results are different for different seeds
			for seed in range(1, numSeeds):
				self.assertNotEqual(results[0][0], results[seed][0])

			# assert results are consistent for same seeds
			for repeats in results:
				for x in repeats[1:]:
					self.assertEqual(repeats[0], x)
Example #19
    def testDFSfrom(self):
        n = 200
        p = 0.15

        def doDFS(G, source):
            visited = [False for _ in range(n)]
            sequence = []
            edgeSequence = []
            visited[source] = 1
            stack = [source]

            while len(stack) > 0:
                u = stack.pop()
                sequence.append(u)
                for v in G.neighbors(u):
                    if visited[v] == False:
                        stack.append(v)
                        visited[v] = True
                        edgeSequence.append((u, v))

            return sequence, edgeSequence

        for seed in range(1, 4):
            nk.setSeed(seed, False)
            for directed in [False, True]:
                G = nk.generators.ErdosRenyiGenerator(n, p,
                                                      directed).generate()
                for source in range(n):
                    sequence, edgeSequence = doDFS(G, source)

                    result = []
                    nk.graph.Traversal.DFSfrom(G, source,
                                               lambda u: result.append(u))
                    self.assertListEqual(sequence, result)

                    result = []
                    nk.graph.Traversal.DFSEdgesFrom(
                        G, source, lambda u, v, w, eid: result.append((u, v)))
                    self.assertListEqual(edgeSequence, result)
Example #20
    def testSCD(self):
        nk.setSeed(42, False)
        seed = 20
        seeds = [seed]
        for name, algo in [("PageRankNibble",
                            nk.scd.PageRankNibble(self.G, 0.1, 1e-12)),
                           ("GCE L", nk.scd.GCE(self.G, "L")),
                           ("GCE M", nk.scd.GCE(self.G, "M")),
                           ("LFM", nk.scd.LFMLocal(self.G, 0.8)),
                           ("TwoPhaseL", nk.scd.TwoPhaseL(self.G)),
                           ("TCE", nk.scd.TCE(self.G)),
                           ("LTE", nk.scd.LocalTightnessExpansion(self.G)),
                           ("LocalT", nk.scd.LocalT(self.G)),
                           ("Clique", nk.scd.CliqueDetect(self.G))]:
            result = algo.run(seeds)[seed]

            self.assertGreaterEqual(len(result), 1,
                                    "{} has empty community".format(name))

            cond = nk.scd.SetConductance(self.G, result).run().getConductance()

            self.assertLessEqual(cond, 0.5,
                                 "{} has too large conductance".format(name))
Example #21
    def generate(self):
        # time.clock() was removed in Python 3.8; seed from the wall clock instead.
        networkit.setSeed(seed=int(time.time()), useThreadId=False)
        return networkit.generators.ErdosRenyiGenerator(
            self.node_count, 0.0002).generate()
Example #22
    def generate(self):
        # time.clock() was removed in Python 3.8; seed from the wall clock instead.
        networkit.setSeed(seed=int(time.time()), useThreadId=False)
        return networkit.generators.BarabasiAlbertGenerator(
            1, self.node_count, 1).generate()
Example #23
    def generate(self):
        # time.clock() was removed in Python 3.8; seed from the wall clock instead.
        networkit.setSeed(seed=int(time.time()), useThreadId=False)
        return networkit.generators.ChungLuGenerator(
            self.deg_sequence).generate()
Example #24
    def fit_er(self, g):
        networkit.setSeed(seed=42, useThreadId=False)
        return networkit.generators.ErdosRenyiGenerator.fit(g).generate()
Example #25
def fit_chung_lu(g):
    networkit.setSeed(seed=42, useThreadId=False)
    return networkit.generators.ChungLuGenerator.fit(g).generate()
Example #26
def fit_chung_lu(g):
    random.seed(42, version=2)
    networkit.setSeed(seed=42, useThreadId=False)
    g = networkit.generators.ChungLuGenerator.fit(g).generate()
    make_connected(g)
    return g