def main() -> None:
    """Compare PageRank on a prebuilt graph vs. an incrementally built copy.

    Builds graph "G1" from the naive_6 edge set and ranks it, then replays
    the same edges one-by-one into an initially empty graph "G2" and ranks
    that as well, printing both rankings and each appended edge.
    """
    beta: float = 0.85          # damping factor
    convergence: float = 0.01   # stop once rank change drops below this
    edges_np: np.ndarray = tf_G.DataSets.naive_6()

    with tf.Session() as sess:
        writer: tf.summary.FileWriter = tf.summary.FileWriter(
            'logs/tensorflow/.')
        graph: tf_G.Graph = tf_G.GraphConstructor.from_edges(
            sess, "G1", edges_np, writer, is_sparse=False)
        pr_iter: tf_G.PageRank = tf_G.IterativePageRank(
            sess, "PR1", graph, beta)

        # Empty graph with the same node count, filled edge-by-edge below.
        g_upgradeable: tf_G.Graph = tf_G.GraphConstructor.empty(
            sess, "G2", graph.n, writer)
        pr_upgradeable: tf_G.PageRank = tf_G.IterativePageRank(
            sess, "PR2", g_upgradeable, beta)

        b: np.ndarray = pr_iter.ranks_np(convergence=convergence)
        print(b)

        for edge in edges_np:
            g_upgradeable.append(edge[0], edge[1])
            print("[" + str(edge[0]) + ", " + str(edge[1]) + "]")

        # Fixed: the original reused the loop variable `e` for this result.
        ranks_upgradeable: np.ndarray = pr_upgradeable.ranks_np(
            convergence=convergence)
        print(ranks_upgradeable)

        writer.add_graph(sess.graph)
def main() -> None:
    """Demonstrate topic-sensitive (personalized) PageRank.

    Computes personalized vectors for topic {4}, topic {5} and the joint
    topic set {4, 5}, plus the element-wise average of the two single-topic
    vectors, printing each vector followed by its sum.
    """
    beta: float = 0.85
    convergence: float = 0.0001
    edges_np: np.ndarray = tf_G.DataSets.naive_6()

    with tf.Session() as sess:
        graph: tf_G.Graph = tf_G.GraphConstructor.from_edges(
            sess, "G", edges_np, is_sparse=False)
        pr_iter: tf_G.PageRank = tf_G.IterativePageRank(
            sess, "PR1", graph, beta)

        v_4: np.ndarray = pr_iter.pagerank_vector_np(
            convergence=convergence, topics=[4])
        v_5: np.ndarray = pr_iter.pagerank_vector_np(
            convergence=convergence, topics=[5])
        v_45: np.ndarray = pr_iter.pagerank_vector_np(
            convergence=convergence, topics=[4, 5])
        # Naive combination of the single-topic vectors, for comparison.
        v_45_pseudo: np.ndarray = (v_4 + v_5) / 2.0

        for vector in (v_4, v_5, v_45, v_45_pseudo):
            print(vector)
            print(np.sum(vector))
def test_iterative_personalized_pagerank_steps():
    """Fixed-step personalized PageRank concentrates rank on topic node 4."""
    with tf.Session() as sess:
        damping = 0.85
        iterations = 100
        graph = tf_G.GraphConstructor.from_edges(
            sess, "G_proof", edges_np=tf_G.DataSets.naive_6())
        ranker = tf_G.IterativePageRank(sess, "Pr_Proof", graph, damping)
        expected = np.array([[4, 0.999186],
                             [0, 0.000284],
                             [5, 0.000176],
                             [1, 0.000150],
                             [3, 0.000116],
                             [2, 0.000089]])
        np.testing.assert_array_almost_equal(
            ranker.ranks_np(steps=iterations, topics=[4]),
            expected,
            decimal=2)
def test_iterative_pagerank_convergence():
    """Convergence-driven iterative PageRank matches the known ranking."""
    with tf.Session() as sess:
        damping = 0.85
        tolerance = 0.01
        graph = tf_G.GraphConstructor.from_edges(
            sess, "G_proof", edges_np=tf_G.DataSets.naive_6())
        ranker = tf_G.IterativePageRank(sess, "Pr_Proof", graph, damping)
        expected = np.array([[0.0, 0.321017],
                             [5.0, 0.20074403],
                             [1.0, 0.17054307],
                             [3.0, 0.13679263],
                             [2.0, 0.10659166],
                             [4.0, 0.0643118]])
        np.testing.assert_array_almost_equal(
            ranker.ranks_np(convergence=tolerance),
            expected,
            decimal=2)
def test_iterative_pagerank_upgradeable():
    """Ranks of an edge-by-edge built graph match the all-at-once result."""
    damping = 0.85
    tolerance = 0.01
    expected = np.array([[0.0, 0.321017],
                         [5.0, 0.20074403],
                         [1.0, 0.17054307],
                         [3.0, 0.13679263],
                         [2.0, 0.10659166],
                         [4.0, 0.0643118]])
    with tf.Session() as sess:
        graph: tf_G.Graph = tf_G.GraphConstructor.empty(sess, "G", 6)
        ranker: tf_G.PageRank = tf_G.IterativePageRank(
            sess, "PR", graph, damping)
        # Feed the naive_6 edges into the initially empty graph one at a time.
        for edge in tf_G.DataSets.naive_6():
            graph.append(edge[0], edge[1])
        np.testing.assert_array_almost_equal(
            ranker.ranks_np(convergence=tolerance),
            expected,
            decimal=2)
def main() -> None:
    """Time one iterative PageRank run to convergence.

    Prints the elapsed seconds followed by the resulting ranks, and logs
    the TensorFlow graph for TensorBoard.
    """
    beta: float = 0.85
    convergence: float = 0.0001
    edges_np: np.ndarray = tf_G.DataSets.naive_6()

    with tf.Session() as sess:
        writer: tf.summary.FileWriter = tf.summary.FileWriter(
            'logs/tensorflow/')
        graph: tf_G.Graph = tf_G.GraphConstructor.from_edges(
            sess, "G", edges_np, is_sparse=False)
        ranker: tf_G.PageRank = tf_G.IterativePageRank(
            sess, "PR", graph, beta)

        t0: float = timeit.default_timer()
        ranks: np.ndarray = ranker.ranks_np(convergence=convergence)
        elapsed: float = timeit.default_timer() - t0

        print(elapsed)
        print(ranks)
        writer.add_graph(sess.graph)
def main() -> None:
    """Benchmark algebraic vs. iterative topic-sensitive PageRank.

    Builds a graph from the naive_6 edge set, runs both PageRank
    implementations with topics=[5] and topics_decrement=True, prints the
    elapsed time of each run and both rankings, saves the iterative ranks
    to CSV, and logs the TensorFlow graph for TensorBoard.
    """
    beta: float = 0.85
    convergence: float = 0.0001
    edges_np: np.ndarray = tf_G.DataSets.naive_6()

    with tf.Session() as sess:
        writer: tf.summary.FileWriter = tf.summary.FileWriter(
            'logs/tensorflow/.')
        graph: tf_G.Graph = tf_G.GraphConstructor.from_edges(
            sess, "G", edges_np, writer, is_sparse=False)
        pr_alge: tf_G.PageRank = tf_G.AlgebraicPageRank(
            sess, "PR1", graph, beta)
        # Fixed: this ranker previously reused the name "PR1", colliding
        # with the algebraic ranker's name (elsewhere in this file each
        # ranker gets a distinct name, e.g. PR1/PR2).
        pr_iter: tf_G.PageRank = tf_G.IterativePageRank(
            sess, "PR2", graph, beta)

        start_time: float = timeit.default_timer()
        a: np.ndarray = pr_alge.ranks_np(
            convergence=convergence, topics=[5], topics_decrement=True)
        elapsed: float = timeit.default_timer() - start_time
        print(elapsed)

        start_time = timeit.default_timer()
        b: np.ndarray = pr_iter.ranks_np(
            convergence=convergence, topics=[5], topics_decrement=True)
        elapsed = timeit.default_timer() - start_time
        print(elapsed)

        print(a)
        print(b)

        tf_G.Utils.save_ranks("logs/csv/iter.csv", b)
        writer.add_graph(sess.graph)