Example No. 1
    # Assumes `layers` is `paddle.fluid.layers` and that `self.attentions`
    # holds the per-head graph-attention layers built in `__init__`.
    def forward(self, x, adj):
        """Forward pass of the graph attention network."""
        x = layers.dropout(x, self.dropout)
        if self.layer == 1:
            # Single-layer model: sum the attention heads and classify directly.
            # Note: `stack` takes `axis`, not `dim`.
            x = layers.stack([att.forward(x, adj) for att in self.attentions],
                             axis=2)
            x = layers.reduce_sum(x, dim=2)
            x = layers.dropout(x, self.dropout)
            return layers.log_softmax(x, axis=2)
        else:
            # Deeper model: concatenate the heads along the feature axis and
            # feed the result to the output attention layer.
            x = layers.concat([att.forward(x, adj) for att in self.attentions],
                              axis=2)
            x = layers.dropout(x, self.dropout)
            return self.out_att.forward(x, adj)
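The two branches differ only in how the attention heads are merged: the single-layer model sums the stacked head outputs, while the deeper model concatenates them along the feature axis. A minimal dygraph sketch of the two aggregations on toy tensors (shapes are hypothetical, not from the original code):

import numpy as np
import paddle.fluid as fluid
import paddle.fluid.layers as layers

with fluid.dygraph.guard():
    # Two hypothetical attention-head outputs, shape [batch, nodes, features].
    heads = [fluid.dygraph.to_variable(np.ones((2, 5, 4), dtype='float32'))
             for _ in range(2)]
    summed = layers.reduce_sum(layers.stack(heads, axis=2), dim=2)  # [2, 5, 4]
    merged = layers.concat(heads, axis=2)                           # [2, 5, 8]
    print(summed.shape, merged.shape)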
Example No. 2
    # Assumes `fluid` is `paddle.fluid`, `LogSoftmax` is `paddle.nn.LogSoftmax`,
    # and `PADDLE_VERSION` is a module-level constant set at import time.
    def kl_loss(self, logits):
        """Pairwise KL-divergence loss for deep mutual learning.

        For each model i, averages kldiv_loss(log_softmax(logits[i]),
        softmax(logits[j])) over all other models j.
        """
        assert len(
            logits
        ) == self.model_num, "The number of logits must match the number of models"
        if self.model_num == 1:
            return []
        kl_losses = []
        for i in range(self.model_num):
            cur_kl_loss = 0
            for j in range(self.model_num):
                if i != j:
                    if PADDLE_VERSION == 2.0:
                        log_softmax = LogSoftmax(axis=1)
                        x = log_softmax(logits[i])
                    else:
                        x = fluid.layers.log_softmax(logits[i], axis=1)
                    y = fluid.layers.softmax(logits[j], axis=1)
                    cur_kl_loss += fluid.layers.kldiv_loss(
                        x, y, reduction='batchmean')
            # Average over the other (model_num - 1) models.
            kl_losses.append(cur_kl_loss / (self.model_num - 1))
        return kl_losses
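A minimal dygraph sketch of one pairwise term from the inner loop, on toy logits (shapes and values are hypothetical); it assumes a Paddle 1.x release where fluid.layers.log_softmax is available, as in the non-2.0 branch above:

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    logits_i = fluid.dygraph.to_variable(np.random.rand(4, 3).astype('float32'))
    logits_j = fluid.dygraph.to_variable(np.random.rand(4, 3).astype('float32'))
    x = fluid.layers.log_softmax(logits_i, axis=1)  # log-probabilities of model i
    y = fluid.layers.softmax(logits_j, axis=1)      # probabilities of model j
    # KL divergence of model i's distribution from model j's, batch-averaged.
    print(fluid.layers.kldiv_loss(x, y, reduction='batchmean').numpy())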
Example No. 3
import paddle.fluid.layers as L


def soft_cross_entropy(inp, target):
    """Cross-entropy of `inp` logits against the soft distribution of `target` logits."""
    inp_likelihood = L.log_softmax(inp, axis=-1)
    target_prob = L.softmax(target, axis=-1)
    return -1. * L.mean(L.reduce_sum(inp_likelihood * target_prob, dim=-1))
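A minimal dygraph usage sketch (the student/teacher names and shapes are hypothetical): `target` is typically a teacher's logits, so the function serves as a soft-label distillation loss.

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    student_logits = fluid.dygraph.to_variable(
        np.random.rand(8, 10).astype('float32'))
    teacher_logits = fluid.dygraph.to_variable(
        np.random.rand(8, 10).astype('float32'))
    loss = soft_cross_entropy(student_logits, teacher_logits)
    print(loss.numpy())  # scalar soft cross-entropy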