# Example 1
def main():
    """Run iterative PageRank on the naive_6 dataset, then rebuild the
    same graph edge by edge and rank it again with a second instance.
    """
    damping: float = 0.85
    tol: float = 0.01

    dataset: np.ndarray = tfgraph.DataSets.naive_6()

    with tf.Session() as sess:
        writer: tf.summary.FileWriter = tf.summary.FileWriter(
            'logs/tensorflow/.')

        graph: tfgraph.Graph = tfgraph.GraphConstructor.from_edges(
            sess, "G1", dataset, writer, is_sparse=False)

        pr_iter: tfgraph.PageRank = tfgraph.IterativePageRank(
            sess, "PR1", graph, damping)

        g_upgradeable: tfgraph.Graph = tfgraph.GraphConstructor.empty(
            sess, "G2", graph.n, writer)

        pr_upgradeable: tfgraph.PageRank = tfgraph.IterativePageRank(
            sess, "PR2", g_upgradeable, damping)

        # Ranks of the fully constructed graph.
        ranks_full: np.ndarray = pr_iter.ranks_np(convergence=tol)
        print(ranks_full)

        # Rebuild the same graph incrementally, echoing each edge.
        for edge in dataset:
            g_upgradeable.append(edge[0], edge[1])
            print("[" + str(edge[0]) + ", " + str(edge[1]) + "]")

        # Ranks of the incrementally built graph should match the above.
        ranks_incremental = pr_upgradeable.ranks_np(convergence=tol)
        print(ranks_incremental)

        writer.add_graph(sess.graph)
# Example 2
def main():
    """Compare topic-sensitive PageRank vectors on naive_6.

    Computes the personalized vectors for topics [4], [5] and [4, 5],
    plus the element-wise average of the two single-topic vectors, and
    prints each vector together with the sum of its entries.
    """
    damping: float = 0.85
    tol: float = 0.0001

    dataset: np.ndarray = tfgraph.DataSets.naive_6()

    with tf.Session() as sess:
        graph: tfgraph.Graph = tfgraph.GraphConstructor.from_edges(
            sess, "G", dataset, is_sparse=False)

        pr_iter: tfgraph.PageRank = tfgraph.IterativePageRank(
            sess, "PR1", graph, damping)

        v_4: np.ndarray = pr_iter.pagerank_vector_np(convergence=tol,
                                                     topics=[4])
        v_5: np.ndarray = pr_iter.pagerank_vector_np(convergence=tol,
                                                     topics=[5])
        v_45: np.ndarray = pr_iter.pagerank_vector_np(convergence=tol,
                                                      topics=[4, 5])
        # Average of the two single-topic vectors, for comparison against
        # the jointly computed v_45.
        v_45_pseudo: np.ndarray = (v_4 + v_5) / 2.0

        for vec in (v_4, v_5, v_45, v_45_pseudo):
            print(vec)
            print(np.sum(vec))
# Example 3
def test_iterative_personalized_pagerank_steps():
  """Personalized PageRank on topic [4] should concentrate nearly all
  rank mass on node 4 after a fixed number of iterations."""
  expected = np.array([[4, 0.999186],
                       [0, 0.000284],
                       [5, 0.000176],
                       [1, 0.000150],
                       [3, 0.000116],
                       [2, 0.000089]])
  with tf.Session() as sess:
    graph = tfgraph.GraphConstructor.from_edges(
      sess, "G_proof", edges_np=tfgraph.DataSets.naive_6())
    page_rank = tfgraph.IterativePageRank(sess, "Pr_Proof", graph, 0.85)

    np.testing.assert_array_almost_equal(
      page_rank.ranks_np(steps=100, topics=[4]), expected, decimal=2)
# Example 4
def test_iterative_pagerank_convergence():
  """Iterative PageRank on naive_6 must converge to the known ranking."""
  expected = np.array([[0.0, 0.321017],
                       [5.0, 0.20074403],
                       [1.0, 0.17054307],
                       [3.0, 0.13679263],
                       [2.0, 0.10659166],
                       [4.0, 0.0643118]])
  with tf.Session() as sess:
    graph = tfgraph.GraphConstructor.from_edges(
      sess, "G_proof", edges_np=tfgraph.DataSets.naive_6())
    page_rank = tfgraph.IterativePageRank(sess, "Pr_Proof", graph, 0.85)

    np.testing.assert_array_almost_equal(
      page_rank.ranks_np(convergence=0.01), expected, decimal=2)
def main():
  """Time one iterative PageRank run on naive_6, print the elapsed time
  and the resulting ranks, and dump the TF graph for TensorBoard."""
  damping: float = 0.85
  tol: float = 0.0001

  dataset: np.ndarray = tfgraph.DataSets.naive_6()

  with tf.Session() as sess:
    writer: tf.summary.FileWriter = tf.summary.FileWriter('logs/tensorflow/')

    graph: tfgraph.Graph = tfgraph.GraphConstructor.from_edges(
      sess, "G", dataset, is_sparse=False)

    page_rank: tfgraph.PageRank = tfgraph.IterativePageRank(
      sess, "PR", graph, damping)

    # Wall-clock the rank computation.
    t0: float = timeit.default_timer()
    ranks: np.ndarray = page_rank.ranks_np(convergence=tol)
    print(timeit.default_timer() - t0)

    print(ranks)
    writer.add_graph(sess.graph)
# Example 6
def test_iterative_pagerank_upgradeable():
  """Building the naive_6 graph edge by edge must yield the same ranks
  as constructing it in one shot."""
  expected = np.array([[0.0, 0.321017],
                       [5.0, 0.20074403],
                       [1.0, 0.17054307],
                       [3.0, 0.13679263],
                       [2.0, 0.10659166],
                       [4.0, 0.0643118]])
  with tf.Session() as sess:

    empty_graph: tfgraph.Graph = tfgraph.GraphConstructor.empty(sess, "G", 6)
    page_rank: tfgraph.PageRank = tfgraph.IterativePageRank(
      sess, "PR", empty_graph, 0.85)

    # Feed the dataset into the initially empty graph one edge at a time.
    for edge in tfgraph.DataSets.naive_6():
      empty_graph.append(edge[0], edge[1])

    np.testing.assert_array_almost_equal(
      page_rank.ranks_np(convergence=0.01), expected, decimal=2)
# Example 7
def main():
    """Benchmark algebraic vs. iterative personalized PageRank (topic 5)
    on the naive_6 dataset, print both rankings with their timings, and
    save the iterative ranks to ``logs/csv/iter.csv``.

    Earlier experiments (upgradeable graphs, sparsifiers, random-graph
    generation) are kept below as commented-out code for reference.
    """
    beta: float = 0.85
    convergence: float = 0.0001

    edges_np: np.ndarray = tfgraph.DataSets.naive_6()

    with tf.Session() as sess:
        writer: tf.summary.FileWriter = tf.summary.FileWriter(
            'logs/tensorflow/.')

        graph: tfgraph.Graph = tfgraph.GraphConstructor.from_edges(
            sess, "G", edges_np, writer, is_sparse=False)

        pr_alge: tfgraph.PageRank = tfgraph.AlgebraicPageRank(
            sess, "PR1", graph, beta)

        # Fix: the iterative instance was also named "PR1", colliding with
        # the algebraic one above; give it a distinct name, matching the
        # PR1/PR2 convention used by the other examples.
        pr_iter: tfgraph.PageRank = tfgraph.IterativePageRank(
            sess, "PR2", graph, beta)

        # Upgradeable-graph experiment (kept for reference):
        # g_upgradeable: tfgraph.Graph = tfgraph.GraphConstructor.empty(
        #     sess, "Gfollowers", 7, writer)
        # pr_upgradeable: tfgraph.PageRank = tfgraph.IterativePageRank(
        #     sess, "PRfollowers", g_upgradeable, beta)

        # a = pr_alge.ranks_np()

        # Time the algebraic personalized PageRank.
        start_time: float = timeit.default_timer()
        a: np.ndarray = pr_alge.ranks_np(convergence=convergence,
                                         topics=[5],
                                         topics_decrement=True)
        elapsed: float = timeit.default_timer() - start_time
        print(elapsed)

        # Time the iterative personalized PageRank on the same graph.
        start_time = timeit.default_timer()
        b: np.ndarray = pr_iter.ranks_np(convergence=convergence,
                                         topics=[5],
                                         topics_decrement=True)
        elapsed = timeit.default_timer() - start_time
        print(elapsed)

        print(a)
        print(b)
        # print(c)
        # print((pr_alge.error_vector_compare_np(pr_iter)))
        # print(pr_iter.ranks_np(convergence=convergence))
        # print(pr_alge.error_ranks_compare_np(pr_iter))

        # Sparsifier experiment (kept for reference):
        # g_sparse = tfgraph.GraphConstructor.as_sparsifier(sess, graph, 0.75)
        # pr_sparse = tfgraph.IterativePageRank(sess, "PR_sparse", g_sparse, beta)
        # start_time = timeit.default_timer()
        # d: np.ndarray = pr_sparse.ranks_np(convergence=convergence)
        # elapsed = timeit.default_timer() - start_time
        # print(elapsed)
        # print(pr_iter.error_ranks_compare_np(pr_sparse))
        # print(graph.m)
        # print(g_sparse.m)

        # Upgradeable-sparsifier experiment (kept for reference):
        # g_sparse_upgradeable = tfgraph.GraphConstructor.empty_sparsifier(
        #     sess=sess, name="G_su", n=6301, p=0.5)
        # pr_iter: tfgraph.PageRank = tfgraph.IterativePageRank(
        #     sess, "PR_su", g_sparse_upgradeable, beta)
        # e = pr_iter.ranks_np(convergence=convergence)
        # for r in edges_np:
        #     g_sparse_upgradeable.append(r[0], r[1])
        #     e = pr_iter.ranks_np(convergence=convergence)
        #     tfgraph.Utils.save_ranks("logs/csv/sparse_update.csv", e)
        # print(e)
        # tfgraph.Utils.save_ranks("logs/csv/alge.csv", a)
        # tfgraph.Utils.save_ranks("logs/csv/sparse.csv", d)
        # tfgraph.Utils.save_ranks("logs/csv/sparse_update.csv", e)

        # Random-graph generation (kept for reference):
        # print(GraphConstructor.unweighted_random(sess, "GRandom", 10 ** 2,
        #                                          10 ** 3, writer=writer))

        tfgraph.Utils.save_ranks("logs/csv/iter.csv", b)

        writer.add_graph(sess.graph)