Ejemplo n.º 1
0
def test_brandes_vs_drbc_connection_prob():
    """Compare DrBC vs exact Brandes running time on Erdos-Renyi graphs
    of varying connection probability; append the timings to a results
    file and plot both curves.
    """
    with open('DrBC/results/Brandes_vs_DrBC.txt', 'a') as f:
        connection_prob = np.array(
            [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
        number_of_nodes = 500
        running_time_brandes = np.zeros(len(connection_prob))
        running_time_drbc = np.zeros(len(connection_prob))
        encoder_decoder = ed.EncoderDecoder()

        for i, prob in enumerate(connection_prob):
            G = csr.erdos_renyi_graph(number_of_nodes, prob)

            # Time the DrBC (learned) prediction.
            start_time = time.time()
            encoder_decoder.predict(G=G)
            # round() instead of float(str(...)[:5]): the string-slicing
            # idiom breaks for elapsed times printed in scientific
            # notation (e.g. '1e-05') or times of 100 s and more.
            running_time_drbc[i] = round(time.time() - start_time, 3)

            # Time the exact Brandes betweenness centrality.
            start_time = time.time()
            utils.betweenness_centrality(G)
            running_time_brandes[i] = round(time.time() - start_time, 3)

        # Bug fix: the writes had no newlines, so every record of every
        # run was concatenated onto a single line of the results file.
        f.write('connection probabilities: ' + str(connection_prob) + '\n')
        f.write('number of nodes: ' + str(number_of_nodes) + '\n')
        f.write('DrBc Running time: ' + str(running_time_drbc) + '\n')
        f.write('Brandes running time: ' + str(running_time_brandes) + '\n')

        plt.title('Running Time for DrBC and Brandes - Erdos Renyi graphs')
        plt.plot(connection_prob, running_time_drbc, '-o', color="tab:green")
        plt.plot(connection_prob, running_time_brandes, '-o', color="tab:blue")
        plt.xlabel('connection between nodes probability')
        plt.show()
Ejemplo n.º 2
0
def memory_test():
    """Plot the resident memory (RSS) of this process after a DrBC
    prediction on Erdos-Renyi graphs of increasing size.
    """
    number_of_nodes = np.array([10, 50, 100, 150, 200])
    memory = np.zeros(len(number_of_nodes))

    encoder_decoder = ed.EncoderDecoder()
    # The Process handle is loop-invariant — create it once, not per
    # iteration as the original did.
    process = psutil.Process(os.getpid())

    for i, n_nodes in enumerate(number_of_nodes):
        G = csr.erdos_renyi_graph(n_nodes, 0.3)
        encoder_decoder.predict(G=G)
        # NOTE(review): RSS measures the whole process, so allocations
        # surviving from earlier iterations are included in later samples.
        memory[i] = process.memory_info().rss

    plt.title('Memory Used by DrBC - Erdos Renyi Graphs')
    plt.plot(number_of_nodes, memory)
    plt.show()
Ejemplo n.º 3
0
def test_random_graphs():
    """Measure DrBC running time on Erdos-Renyi graphs of increasing
    size; append the timings to a results file and plot them.
    """
    # Bug fix: the file was opened without `as f` and nothing was ever
    # written to it, so the measurements were silently discarded.
    with open('results/Brandes_vs_DrBC.txt', 'a') as f:
        number_of_nodes = np.array([10, 100, 1000])
        running_time = np.zeros(len(number_of_nodes))
        encoder_decoder = ed.EncoderDecoder()

        for i, n_nodes in enumerate(number_of_nodes):
            G = csr.erdos_renyi_graph(n_nodes, 0.3)
            start_time = time.time()
            encoder_decoder.predict(G=G)
            # round() instead of the fragile float(str(...)[:5]) idiom,
            # which breaks on scientific notation (e.g. '1e-05').
            running_time[i] = round(time.time() - start_time, 3)

        f.write('number of nodes: ' + str(number_of_nodes) + '\n')
        f.write('DrBC running time: ' + str(running_time) + '\n')

        plt.title('Running Time for DrBC - random graphs')
        plt.plot(number_of_nodes, running_time)
        plt.show()
Ejemplo n.º 4
0
def test_real_graphs():
    """Measure DrBC running time on real graphs loaded from tsv files
    and plot running time against graph size.
    """
    files = os.listdir('tsv_graphs')
    encoder_decoder = ed.EncoderDecoder()

    number_of_nodes = np.zeros(len(files))
    running_time = np.zeros(len(files))

    for i, file in enumerate(files):
        G = csr.CSR(tsv_file='tsv_graphs/' + file)
        number_of_nodes[i] = G.n_vertices
        start_time = time.time()
        encoder_decoder.predict(G=G)
        # round() instead of the fragile float(str(...)[:5]) idiom,
        # which breaks on scientific notation (e.g. '1e-05').
        running_time[i] = round(time.time() - start_time, 3)

    # Bug fix: the original sorted the two arrays independently, which
    # destroyed the (graph size, running time) pairing and produced a
    # meaningless plot. Sort by graph size and reorder times with it.
    order = np.argsort(number_of_nodes)
    number_of_nodes = number_of_nodes[order]
    running_time = running_time[order]

    plt.title('Running Time for DrBC - Real Graphs')
    plt.plot(number_of_nodes, running_time)
    plt.show()
Ejemplo n.º 5
0
    def encode(self, G, v, X, L, W0, W1, W2, W3, U1, U2, U3):
        """Compute the final embedding of node v after L encoder layers.

        h[l] holds node v's embedding at layer l (0..L); self.H and
        self.HN cache the per-layer embeddings and neighborhood
        aggregations for every node. Returns [z], where z is the
        max-pool over h[1..L].
        """
        # h[l] is unconditionally assigned before use below, so no
        # zero-initialization is needed (the original pre-filled every
        # entry — and an unused local hN — with arrays that were
        # immediately overwritten; that dead code is removed here).
        h = [None] * (L + 1)

        # Layer 0: the raw feature vector of node v.
        h[0] = np.transpose(X)
        self.H[0][v] = h[0]

        # Layer 1: linear transform + ReLU, then L2-normalize.
        for node in range(self.nNodes):
            self.H[1][node] = ed.ReLU(
                np.matmul(W0, np.transpose(self.H[0][node])))
            if self.H[1][node].any():  # skip normalizing an all-zero vector
                self.H[1][node] = self.H[1][node] / la.norm(self.H[1][node], 2)

        h[1] = self.H[1][v]

        # Layers 2..L: aggregate each node's neighborhood, then combine
        # it with the previous layer's embedding through a GRU cell.
        for l in range(2, L + 1):
            for node in range(self.nNodes):
                """AGGREGATE"""
                self.HN[l, node] = self.aggregateNeighborhood(
                    G, node, G.get_neighbors(node), l)
                """COMBINE"""
                self.H[l][node] = self.GRUCell(self.H[l - 1][node],
                                               self.HN[l, node], W1, W2, W3,
                                               U1, U2, U3)

            # NOTE(review): unlike layer 1 there is no zero check here,
            # so an all-zero embedding would divide by zero — confirm
            # whether that can occur in practice.
            self.H[l][v] = self.H[l][v] / la.norm(self.H[l][v], 2)
            h[l] = self.H[l][v]

        # z: the final embedding, max-pooled over the layer embeddings.
        z = self.maxPool(h[1:], self.embedding_dimension)
        return [z]
Ejemplo n.º 6
0
def test_fit():
    encoder_decoder = ed.EncoderDecoder(n_iterations=10)

    start_time = time.time()