Example #1
    def test_swap_is_not_persisted_in_class(self):
        opt = self._opt()
        dictionary = DictionaryAgent(opt)

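        # Build a model class whose encoder layers use a custom feedforward subclass.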
        CustomFFN = type('CustomFFN', (TransformerFFN,), {})
        wrapped_class = TransformerGeneratorModel.with_components(
            encoder=TransformerEncoder.with_components(
                layer=TransformerEncoderLayer.with_components(
                    feedforward=CustomFFN)))
        model = wrapped_class(opt=opt, dictionary=dictionary)
        assert (
            model.swappables.encoder.swappables.layer.swappables.feedforward ==
            CustomFFN)  # type: ignore

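        # An instance of the plain, unwrapped class is unaffected by the swap
        # and still uses the default components.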
        another_model = TransformerGeneratorModel(opt, dictionary)
        assert another_model.swappables != model.swappables
        assert issubclass(another_model.swappables.encoder,
                          TransformerEncoder)  # type: ignore

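        # swap_components() changes the components on the wrapped class itself,
        # so models built from it afterwards use the plain TransformerFFN again.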
        wrapped_class.swap_components(
            encoder=TransformerEncoder.with_components(
                layer=TransformerEncoderLayer.with_components(
                    feedforward=TransformerFFN)))
        one_more_model = wrapped_class(opt=opt, dictionary=dictionary)
        assert (
            one_more_model.swappables.encoder.swappables.layer.swappables.feedforward
            == TransformerFFN
        )  # type: ignore
Example #2
    def __init__(
        self,
        n_heads,
        n_layers,
        hidden_dim,
        ffn_size,
        reduction=True,
        attention_dropout=0.0,
        relu_dropout=0.0,
        learn_positional_embeddings=False,
    ):
        super().__init__()
        self.ffn_size = ffn_size
        self.n_layers = n_layers
        self.n_heads = n_heads
        self.out_dim = hidden_dim
        self.dim = hidden_dim
        self.reduction = reduction
        assert hidden_dim % n_heads == 0, "MM-Combiner dim must be multiple of n_heads"
        n_positions = 1024
        self.position_embeddings = nn.Embedding(n_positions, hidden_dim)
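        # Initialise the position table with fixed position codes unless
        # learned positional embeddings are requested.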
        if not learn_positional_embeddings:
            create_position_codes(n_positions,
                                  hidden_dim,
                                  out=self.position_embeddings.weight)
        else:
            nn.init.normal_(self.position_embeddings.weight, 0,
                            hidden_dim**-0.5)

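        # Stack of n_layers standard transformer encoder layers.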
        self.layers = nn.ModuleList()
        for _ in range(self.n_layers):
            self.layers.append(
                TransformerEncoderLayer(n_heads, hidden_dim, ffn_size,
                                        attention_dropout, relu_dropout))
Example #3
    def build_model(self, states=None):
        # Pass the standard attention and feedforward classes explicitly; this
        # mirrors the default model while leaving an obvious place to swap in
        # custom modules.
        wrapped_class = TransformerGeneratorModel.with_components(
            encoder=TransformerEncoder.with_components(
                layer=TransformerEncoderLayer.with_components(
                    self_attention=MultiHeadAttention,
                    feedforward=TransformerFFN)),
            decoder=TransformerDecoder.with_components(
                layer=TransformerDecoderLayer.with_components(
                    encoder_attention=MultiHeadAttention,
                    self_attention=MultiHeadAttention,
                    feedforward=TransformerFFN,
                )),
        )
        return wrapped_class(opt=self.opt, dictionary=self.dict)
Example #4
    def test_swap_encoder_attention(self):
        # Replace the encoder feedforward with a subclass whose forward is a
        # MagicMock, so we can check that the swapped component gets called.
        CustomFFN = type('CustomFFN', (TransformerFFN,), {})
        CustomFFN.forward = MagicMock()
        wrapped_class = TransformerGeneratorModel.with_components(
            encoder=TransformerEncoder.with_components(
                layer=TransformerEncoderLayer.with_components(
                    feedforward=CustomFFN)))
        opt = self._opt()
        CustomFFN.forward.assert_not_called()
        model = wrapped_class(opt=opt, dictionary=DictionaryAgent(opt))
        assert isinstance(model, TransformerGeneratorModel)  # type: ignore
        try:
            model(torch.zeros(1, 1).long(),
                  ys=torch.zeros(1, 1).long())  # type: ignore
        except TypeError:
            pass
        finally:
            # The mocked forward returns a MagicMock rather than a tensor, so
            # the forward pass may raise TypeError; we only check it was hit.
            CustomFFN.forward.assert_called()