Example No. 1
0
 def get_adj(self, n, edges):
     """Build the in-normalized adjacency tensor and its reverse.

     Args:
         n: number of graph nodes (adjacency is n x n).
         edges: array-like of shape (E, 2); column 0 is source,
             column 1 is target.  # assumes indexable as edges[:, i] — TODO confirm

     Returns:
         Tuple ``(adj_, r_adj)`` of CUDA tensors: the normalized adjacency
         and the normalized transposed (reversed-edge) adjacency.
     """
     weights = np.ones(len(edges))
     src, dst = edges[:, 0], edges[:, 1]
     adj = sp.coo_matrix((weights, (src, dst)), shape=(n, n), dtype='float32')
     forward = spm_to_tensor(normt_spm(adj, method='in')).cuda()
     reverse = spm_to_tensor(normt_spm(adj.transpose(), method='in')).cuda()
     return forward, reverse
Example No. 2
0
    def __init__(self, n, edges_set, in_channels, out_channels, hidden_layers):
        """Build a multi-relation GCN over several edge sets.

        Args:
            n: number of graph nodes.
            edges_set: iterable of edge lists, one per relation; each edge is
                a (source, target) pair.
            in_channels: input feature dimension.
            out_channels: output feature dimension.
            hidden_layers: comma-separated layer spec, e.g. ``'d2048,1024,d'``.
                A ``'d'`` prefix on a width enables dropout on that layer; a
                trailing bare ``'d'`` enables dropout on the final layer.
        """
        super().__init__()

        self.n = n
        self.d = len(edges_set)

        # Per-relation normalized adjacency (forward) and its transpose (reverse).
        self.a_adj_set = []
        self.r_adj_set = []

        for edges in edges_set:
            arr = np.array(edges)
            adj = sp.coo_matrix(
                (np.ones(len(arr)), (arr[:, 0], arr[:, 1])),
                shape=(n, n),
                dtype='float32')
            self.a_adj_set.append(
                spm_to_tensor(normt_spm(adj, method='in')).cuda())
            self.r_adj_set.append(
                spm_to_tensor(normt_spm(adj.transpose(), method='in')).cuda())

        spec = hidden_layers.split(',')
        dropout_last = spec[-1] == 'd'
        if dropout_last:
            spec = spec[:-1]

        # Learnable per-relation attention weights (forward and reverse).
        self.a_att = nn.Parameter(torch.ones(self.d))
        self.r_att = nn.Parameter(torch.ones(self.d))

        layers = []
        last_c = in_channels
        for idx, token in enumerate(spec, start=1):
            dropout = token[0] == 'd'
            if dropout:
                token = token[1:]
            width = int(token)

            conv = GraphConv(last_c, width, dropout=dropout)
            self.add_module('conv{}'.format(idx), conv)
            layers.append(conv)
            last_c = width

        # Final layer: no ReLU so raw scores come out.
        conv = GraphConv(last_c,
                         out_channels,
                         relu=False,
                         dropout=dropout_last)
        self.add_module('conv-last', conv)
        layers.append(conv)

        self.layers = layers
Example No. 3
0
 def get_adj(self, weights, edges, n):
     """Build an in-normalized, weighted adjacency tensor on the GPU.

     Args:
         weights: edge weights, one per row of ``edges``.
         edges: array-like of shape (E, 2); column 0 is source,
             column 1 is target.  # assumes indexable as edges[:, i] — TODO confirm
         n: number of graph nodes (adjacency is n x n).

     Returns:
         CUDA tensor holding the normalized sparse adjacency.
     """
     coo = sp.coo_matrix((weights, (edges[:, 0], edges[:, 1])),
                         shape=(n, n),
                         dtype='float32')
     normalized = normt_spm(coo, method='in')
     tensor = spm_to_tensor(normalized)
     return tensor.cuda()
Example No. 4
0
File: gcn.py Project: CenIII/adgpm
    def __init__(self,
                 n,
                 edges,
                 in_channels,
                 out_channels,
                 hidden_layers,
                 norm_method='in'):
        """Build a single-graph GCN.

        Args:
            n: number of graph nodes.
            edges: list of (source, target) pairs.
            in_channels: input feature dimension.
            out_channels: output feature dimension.
            hidden_layers: comma-separated layer spec, e.g. ``'d2048,1024,d'``.
                A ``'d'`` prefix on a width enables dropout on that layer; a
                trailing bare ``'d'`` enables dropout on the final layer.
            norm_method: normalization mode passed to ``normt_spm``.
        """
        super().__init__()

        arr = np.array(edges)
        coo = sp.coo_matrix((np.ones(len(arr)), (arr[:, 0], arr[:, 1])),
                            shape=(n, n),
                            dtype='float32')
        self.adj = spm_to_tensor(normt_spm(coo, method=norm_method)).cuda()

        spec = hidden_layers.split(',')
        dropout_last = spec[-1] == 'd'
        if dropout_last:
            spec = spec[:-1]

        layers = []
        last_c = in_channels
        for idx, token in enumerate(spec, start=1):
            dropout = token[0] == 'd'
            if dropout:
                token = token[1:]
            width = int(token)

            conv = GraphConv(last_c, width, dropout=dropout)
            self.add_module('conv{}'.format(idx), conv)
            layers.append(conv)
            last_c = width

        # Final layer: no ReLU so raw scores come out.
        conv = GraphConv(last_c,
                         out_channels,
                         relu=False,
                         dropout=dropout_last)
        self.add_module('conv-last', conv)
        layers.append(conv)

        self.layers = layers
Example No. 5
0
    def __init__(self, n, edges_set, weights_set, in_channels, out_channels,
                 hidden_layers):
        """Build a multi-relation, weighted-edge GCN.

        Args:
            n: number of graph nodes.
            edges_set: iterable of edge lists, one per relation; each edge is
                a (source, target) pair.
            weights_set: per-relation edge weights, aligned with
                ``edges_set`` (weights_set[i][j] weighs edges_set[i][j]).
            in_channels: input feature dimension.
            out_channels: output feature dimension.
            hidden_layers: comma-separated layer spec, e.g. ``'d2048,1024,d'``.
                A ``'d'`` prefix on a width enables dropout on that layer; a
                trailing bare ``'d'`` enables dropout on the final layer.
        """
        super().__init__()

        self.n = n
        self.d = len(edges_set)

        # Per-relation normalized weighted adjacency tensors.
        self.a_adj_set = []

        for i, edges in enumerate(edges_set):
            edges = np.array(edges)
            adj = sp.coo_matrix(
                (np.array(weights_set[i]), (edges[:, 0], edges[:, 1])),
                shape=(n, n),
                dtype='float32')
            a_adj = spm_to_tensor(normt_spm(adj, method='in')).cuda()
            self.a_adj_set.append(a_adj)

        # NOTE: a leftover debug `print(self.a_adj_set)` was removed here —
        # it dumped every CUDA adjacency tensor at construction time.

        hl = hidden_layers.split(',')
        if hl[-1] == 'd':
            dropout_last = True
            hl = hl[:-1]
        else:
            dropout_last = False

        i = 0
        layers = []
        last_c = in_channels
        for c in hl:
            if c[0] == 'd':
                dropout = True
                c = c[1:]
            else:
                dropout = False
            c = int(c)

            i += 1
            conv = GraphConv(last_c, c, self.d, dropout=dropout)
            self.add_module('conv{}'.format(i), conv)
            layers.append(conv)

            last_c = c

        # Final layer: no ReLU so raw scores come out.
        conv = GraphConv(last_c, out_channels, self.d, relu=False,
                         dropout=dropout_last)
        self.add_module('conv-last', conv)
        layers.append(conv)

        self.layers = layers
Example No. 6
0
    def __init__(self, n, edges, in_channels, out_channels, hidden_layers, k):
        """Build an MLP model that also stores a normalized graph adjacency.

        Args:
            n: number of graph nodes.
            edges: list of (source, target) pairs.
            in_channels: input feature dimension.
            out_channels: output feature dimension.
            hidden_layers: comma-separated layer spec, e.g. ``'d2048,1024,d'``.
                A ``'d'`` prefix on a width enables dropout on that layer; a
                trailing bare ``'d'`` enables dropout on the final layer.
            k: model hyperparameter, stored as ``self.k``.  # semantics not
                # visible from this block — confirm against callers
        """
        super().__init__()

        arr = np.array(edges)
        coo = sp.coo_matrix((np.ones(len(arr)), (arr[:, 0], arr[:, 1])),
                            shape=(n, n),
                            dtype='float32')
        self.adj = spm_to_tensor(normt_spm(coo, method='in')).cuda()
        self.k = k

        spec = hidden_layers.split(',')
        dropout_last = spec[-1] == 'd'
        if dropout_last:
            spec = spec[:-1]

        modules = []
        last_c = in_channels
        for token in spec:
            dropout = token[0] == 'd'
            if dropout:
                token = token[1:]
            width = int(token)

            modules.append(FullyConnect(last_c, width, dropout=dropout))
            last_c = width

        # Final layer: no ReLU so raw scores come out.
        modules.append(
            FullyConnect(last_c,
                         out_channels,
                         relu=False,
                         dropout=dropout_last))

        self.mlp = nn.Sequential(*modules)