Example #1
0
    def forward(self, incoming):
        """Teacher-forced forward pass: generate per-step word logits and
        record cross-entropy loss plus perplexity on ``incoming.result``.

        Side effects on ``incoming``:
          - ``incoming.gen``: a fresh ``Storage`` populated by ``self.teacherForcing``
          - ``incoming.result.word_loss``: mean CE loss over flattened tokens
          - ``incoming.result.perplexity``: ``exp(word_loss)``
        """
        # Pack the decoder inputs read by teacherForcing.
        model_input = Storage()
        model_input.attn_mask = incoming.data.sent_attnmask
        model_input.sent = incoming.data.sent

        generated = Storage()
        incoming.gen = generated
        self.teacherForcing(model_input, generated)

        # Targets drop the first timestep via [1:] (presumably a BOS token —
        # confirm against teacherForcing), so valid lengths shrink by one.
        target_lengths = incoming.data.sent_length - 1
        logits_flat = flattenSequence(generated.w.transpose(0, 1),
                                      target_lengths)
        targets_flat = flattenSequence(
            incoming.data.sent.transpose(0, 1)[1:], target_lengths)

        incoming.result.word_loss = self.lossCE(logits_flat, targets_flat)
        incoming.result.perplexity = torch.exp(incoming.result.word_loss)