Example #1
File: nklm.py  Project: neulab/lrlm
import torch
from torch import nn


def _pick_embed(embedding: nn.Embedding, indices: torch.LongTensor,
                mask: torch.ByteTensor) -> torch.Tensor:
    # Redirect masked positions to index 0 so the lookup never goes out of range.
    indices = indices.clone()
    indices[~mask] = 0
    embed = embedding(indices)
    # Zero out the embedding vectors at the masked positions.
    embed *= mask.to(dtype=torch.float).unsqueeze(-1).expand_as(embed)
    return embed
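
A minimal usage sketch (the shapes and index values below are illustrative assumptions, not taken from the project): masked positions may hold any placeholder index, since _pick_embed first redirects them to index 0 and then zeroes their embedding vectors. A bool mask works at runtime despite the ByteTensor annotation.

embedding = nn.Embedding(num_embeddings=100, embedding_dim=8)
# -1 marks padded positions; the mask flags the valid entries.
indices = torch.tensor([[1, 5, -1], [2, -1, -1]])
mask = indices >= 0
embed = _pick_embed(embedding, indices, mask)  # shape: (2, 3, 8)
assert embed[0, 2].abs().sum() == 0  # padded slots embed to all zeros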
Example #2
from torch.nn import Embedding, Module
from torch.nn.utils.rnn import PackedSequence


class PackedEmbedding(Module):
    # `Vocabulary` is a project-local type; it only needs to support len().
    def __init__(self, dimension: int, vocabulary: Vocabulary):
        super().__init__()
        self.embedding = Embedding(
            num_embeddings=len(vocabulary), embedding_dim=dimension
        )

    def forward(self, inputs: PackedSequence) -> PackedSequence:  # type: ignore
        """Embed the packed sequence given as input."""
        # Embed the flat token tensor and rewrap it with the original batch sizes.
        return PackedSequence(self.embedding(inputs.data), inputs.batch_sizes)
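
A hedged usage sketch, assuming only that the project's Vocabulary supports len() (stubbed here with a plain list): variable-length index sequences are packed, the flat .data tensor is embedded in one call, and the result stays a PackedSequence ready to feed an RNN.

import torch
from torch.nn.utils.rnn import pack_sequence

vocabulary = list(range(100))  # stand-in for the project's Vocabulary type
module = PackedEmbedding(dimension=8, vocabulary=vocabulary)

# Sequences are sorted by decreasing length, as pack_sequence expects by default.
sequences = [torch.tensor([4, 2, 7]), torch.tensor([1, 3])]
packed = pack_sequence(sequences)
embedded = module(packed)
assert embedded.data.shape == (5, 8)  # one embedding row per token, in packed order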