Example #1
    def test_no_token_type_layer(self):
        """Building with ``type_vocab_size=0`` omits the token-type embedding layer."""
        config = copy.deepcopy(self.params_dict)
        config["type_vocab_size"] = 0
        embeddings_module = TransformerEmbeddings.from_params(Params(config))
        # Only the word and position embeddings should remain.
        assert len(embeddings_module.embeddings) == 2
Example #2
    def setup_method(self):
        """Create a fresh parameter dict and TransformerEmbeddings before each test."""
        super().setup_method()
        # Shallow copy so individual tests can override top-level keys
        # without mutating the shared PARAMS_DICT template.
        self.params_dict = dict(PARAMS_DICT)
        self.transformer_embeddings = TransformerEmbeddings.from_params(
            Params(copy.deepcopy(self.params_dict))
        )
def test_output_size(params):
    """Setting ``output_size`` projects the embeddings to that last-dim width."""
    params["output_size"] = 7
    module = TransformerEmbeddings.from_params(params)
    output = module(
        input_ids=torch.tensor([[1, 2]]),
        token_type_ids=torch.tensor([[1, 0]], dtype=torch.long),
        position_ids=torch.tensor([[0, 1]]),
    )
    assert output.shape[-1] == 7
Example #4
    def test_output_size(self):
        """With ``output_size=7`` the forward pass yields vectors of width 7."""
        config = copy.deepcopy(self.params_dict)
        config["output_size"] = 7
        module = TransformerEmbeddings.from_params(Params(config))
        result = module.forward(
            input_ids=torch.tensor([[1, 2]]),
            token_type_ids=torch.tensor([[1, 0]], dtype=torch.long),
            position_ids=torch.tensor([[0, 1]]),
        )
        assert result.shape[-1] == 7
def transformer_embeddings(params):
    """Construct a TransformerEmbeddings from a private copy of *params*."""
    # Duplicate so constructing the module does not consume the caller's Params.
    own_params = params.duplicate()
    return TransformerEmbeddings.from_params(own_params)
def test_no_token_type_layer(params):
    """``type_vocab_size=0`` drops the token-type embedding layer."""
    params["type_vocab_size"] = 0
    embeddings_module = TransformerEmbeddings.from_params(params)
    # Word and position embeddings only.
    assert len(embeddings_module.embeddings) == 2