Example #1
def forward(self, x):
    """Forward pass of the network."""
    # Mark positions whose sub-token ids are not all padding.
    mask = layers.reduce_any(x != self.pad_index, -1)
    # Count the real (non-pad) tokens in each sequence.
    lens = nn.reduce_sum(mask, -1)
    # Drop padding and encode the flattened tokens.
    masked_x = nn.masked_select(x, mask)
    h, _ = self.transformer(masked_x)
    # Split the flat output back into per-sequence chunks and re-pad.
    feat_embed = nn.pad_sequence_paddle(
        layers.split(h, lens.numpy().tolist(), dim=0), self.pad_index)
    return feat_embed
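The mask/split/re-pad bookkeeping above is easier to follow on concrete shapes. Below is a minimal NumPy sketch of the same round trip (the shapes and values are illustrative assumptions, not taken from the source):

    import numpy as np

    pad_index = 0
    # x: sub-token ids, shape [batch=2, seq_len=3, fix_len=2]; pad rows are all zeros.
    x = np.array([[[2, 3], [4, 0], [0, 0]],
                  [[5, 0], [0, 0], [0, 0]]])
    mask = (x != pad_index).any(-1)      # [[True, True, False], [True, False, False]]
    lens = mask.sum(-1)                  # [2, 1] real tokens per sequence
    masked_x = x[mask]                   # shape (3, 2): the 3 real tokens, batch flattened
    # Splitting by lens undoes the flattening before re-padding to a rectangular batch:
    chunks = np.split(masked_x, np.cumsum(lens)[:-1], axis=0)
    print([c.shape for c in chunks])     # [(2, 2), (1, 2)]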
Example #2
def forward(self, x):
    """Forward pass of the network."""
    # Mark words whose character ids are not all padding.
    mask = layers.reduce_any(x != self.pad_index, -1)
    # Count the real (non-pad) words in each sequence.
    lens = nn.reduce_sum(mask, -1)
    # Flatten to one row of character ids per real word.
    masked_x = nn.masked_select(x, mask)
    char_mask = masked_x != self.pad_index
    emb = self.embed(masked_x)

    # Keep only the final hidden states; h holds one slice per LSTM direction.
    _, (h, _) = self.lstm(emb, char_mask, self.pad_index)
    h = layers.concat(layers.unstack(h), axis=-1)
    # Split the per-word features back into sequences and re-pad.
    feat_embed = nn.pad_sequence_paddle(
        layers.split(h, lens.numpy().tolist(), dim=0), self.pad_index)
    return feat_embed
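The concat-of-unstacked-states step merges the final hidden state of each LSTM direction into a single vector per word. A small NumPy sketch of that reshaping, assuming h is laid out as [num_directions, n_words, hidden_size] (the custom lstm wrapper's exact output layout is an assumption here):

    import numpy as np

    n_words, hidden_size = 3, 4
    # h: final LSTM hidden states, one slice per direction (bidirectional assumed).
    h = np.random.rand(2, n_words, hidden_size)
    # Unstack along axis 0 and concatenate along the feature axis:
    feat = np.concatenate(list(h), axis=-1)
    print(feat.shape)                    # (3, 8): forward and backward states side by side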
Example #3
def flat_words(self, words):
    pad_index = self.args.pad_index
    # Number of non-pad subwords in each word.
    lens = nn.reduce_sum(words != pad_index, dim=-1)
    # Index of each word's last subword in the flattened sequence; all-pad
    # words are counted as length 1 so the cumulative sum stays monotonic.
    position = layers.cumsum(lens + layers.cast((lens == 0), "int32"),
                             axis=1) - 1
    # Drop padding, then split back into sentences and re-pad to equal length.
    flat_words = nn.masked_select(words, words != pad_index)
    flat_words = nn.pad_sequence_paddle(
        layers.split(flat_words,
                     layers.reduce_sum(lens, -1).numpy().tolist(),
                     dim=0), pad_index)
    max_len = flat_words.shape[1]
    # Clamp positions that fall beyond the padded length (trailing pad words).
    position = nn.mask_fill(position, position >= max_len, max_len - 1)
    return flat_words, position
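The position arithmetic is the subtle part: for each word it records where that word's last subword lands in the flattened sequence, so those rows can later be gathered back out of the encoder output. A worked NumPy example (values are illustrative assumptions, not from the source):

    import numpy as np

    pad_index = 0
    # words: subword ids, shape [batch=1, seq_len=4, fix_len=3]; last "word" is padding.
    words = np.array([[[2, 3, 0], [4, 0, 0], [5, 6, 7], [0, 0, 0]]])
    lens = (words != pad_index).sum(-1)                   # [[2, 1, 3, 0]] subwords per word
    # Bump all-pad words to length 1 so the cumulative sum stays monotonic; each
    # entry is then the index of that word's last subword in the flat sequence:
    position = np.cumsum(lens + (lens == 0), axis=1) - 1  # [[1, 2, 5, 6]]
    # The trailing 6 is out of range; it is what the final mask_fill clamps to max_len - 1.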