Example #1
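
These snippets are test methods excerpted from a larger test class, so they assume context that is not shown: the imports and the ValidatedAttentionLayer helper. Below is a minimal sketch of that context. The import path and the helper's body are assumptions inferred from how the tests use them, not the test file's exact code.

# Assumed context for the snippets below (hypothetical reconstruction).
import json

import numpy as np
import tensorflow as tf

# Assumed import path, in the style of the TensorFlow Model Garden.
from official.nlp.modeling.layers import transformer_scaffold


class ValidatedAttentionLayer(tf.keras.layers.MultiHeadAttention):
    """Attention layer that records every call in a shared list."""

    def __init__(self, call_list, **kwargs):
        super().__init__(**kwargs)
        self.list = call_list

    def call(self, query, value, attention_mask=None):
        # Appending True proves the custom class was built and invoked.
        self.list.append(True)
        return super().call(query, value, attention_mask=attention_mask)

    def get_config(self):
        # The shared list is not JSON-serializable; restored copies get a
        # fresh, empty list instead.
        config = super().get_config()
        config['call_list'] = []
        return config
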
    def test_layer_creation(self):
        sequence_length = 21
        width = 80

        call_list = []
        attention_layer_cfg = {
            'num_heads': 10,
            'key_dim': 8,
            'call_list': call_list,
        }
        test_layer = transformer_scaffold.TransformerScaffold(
            attention_cls=ValidatedAttentionLayer,
            attention_cfg=attention_layer_cfg,
            num_attention_heads=10,
            intermediate_size=2048,
            intermediate_activation='relu')

        # Create a 3-dimensional input (the first dimension is implicit).
        data_tensor = tf.keras.Input(shape=(sequence_length, width))
        output_tensor = test_layer(data_tensor)
        # By default, a transformer layer's output shape should match its input shape.
        self.assertEqual(data_tensor.shape.as_list(),
                         output_tensor.shape.as_list())

        # If call_list[0] exists and is True, the passed layer class was
        # instantiated from the given config properly.
        self.assertNotEmpty(call_list)
        self.assertTrue(call_list[0],
                        "The passed layer class wasn't instantiated.")
Example #2
    def test_layer_restoration_from_config(self):
        sequence_length = 21
        width = 80

        call_list = []
        attention_layer_cfg = {
            'num_heads': 10,
            'key_dim': 8,
            'call_list': call_list,
            'name': 'test_layer',
        }
        test_layer = transformer_scaffold.TransformerScaffold(
            attention_cls=ValidatedAttentionLayer,
            attention_cfg=attention_layer_cfg,
            num_attention_heads=10,
            intermediate_size=2048,
            intermediate_activation='relu')

        # Create a 3-dimensional input (the first dimension is implicit).
        data_tensor = tf.keras.Input(shape=(sequence_length, width))
        # Create a 3-dimensional mask input (the first dimension is implicit).
        mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length))
        output_tensor = test_layer([data_tensor, mask_tensor])

        # Create a model from the test layer.
        model = tf.keras.Model([data_tensor, mask_tensor], output_tensor)

        # Invoke the model on test data. We can't validate the output data itself
        # (the NN is too complex) but this will rule out structural runtime errors.
        batch_size = 6
        input_data = 10 * np.random.random_sample(
            (batch_size, sequence_length, width))
        # The attention mask should be of shape (batch, from_seq_len, to_seq_len),
        # which here is (batch, sequence_length, sequence_length).
        mask_data = np.random.randint(2,
                                      size=(batch_size, sequence_length,
                                            sequence_length))
        pre_serialization_output = model.predict([input_data, mask_data])

        # Serialize the model config. Pass the serialized data through json to
        # ensure that we can serialize this layer to disk.
        serialized_data = json.dumps(model.get_config())
        post_string_serialized_data = json.loads(serialized_data)

        # Create a new model from the old config, and copy the weights. These models
        # should have identical outputs.
        new_model = tf.keras.Model.from_config(post_string_serialized_data)
        new_model.set_weights(model.get_weights())
        output = new_model.predict([input_data, mask_data])

        self.assertAllClose(pre_serialization_output, output)

        # If the layer was restored correctly, its attention sublayer should be
        # the custom class, and should therefore expose the `list` attribute
        # passed in via the config.
        new_model.summary()
        new_call_list = new_model.get_layer(
            name='transformer_scaffold')._attention_layer.list
        self.assertNotEmpty(new_call_list)
        self.assertTrue(new_call_list[0],
                        "The passed layer class wasn't instantiated.")
Example #3
    def test_layer_invocation(self):
        sequence_length = 21
        width = 80

        call_list = []
        attention_layer_cfg = {
            'num_heads': 10,
            'key_dim': 8,
            'call_list': call_list,
        }
        test_layer = transformer_scaffold.TransformerScaffold(
            attention_cls=ValidatedAttentionLayer,
            attention_cfg=attention_layer_cfg,
            num_attention_heads=10,
            intermediate_size=2048,
            intermediate_activation='relu')

        # Create a 3-dimensional input (the first dimension is implicit).
        data_tensor = tf.keras.Input(shape=(sequence_length, width))
        output_tensor = test_layer(data_tensor)

        # Create a model from the test layer.
        model = tf.keras.Model(data_tensor, output_tensor)

        # Invoke the model on test data. We can't validate the output data itself
        # (the NN is too complex) but this will rule out structural runtime errors.
        batch_size = 6
        input_data = 10 * np.random.random_sample(
            (batch_size, sequence_length, width))
        _ = model.predict(input_data)
        # If call_list[0] exists and is True, the passed layer class was
        # instantiated from the given config properly.
        self.assertNotEmpty(call_list)
        self.assertTrue(call_list[0],
                        "The passed layer class wasn't instantiated.")
Example #4
    def test_layer_creation_with_incorrect_mask_fails(self):
        sequence_length = 21
        width = 80

        call_list = []
        attention_layer_cfg = {
            'num_heads': 10,
            'key_dim': 8,
            'call_list': call_list,
        }
        test_layer = transformer_scaffold.TransformerScaffold(
            attention_cls=ValidatedAttentionLayer,
            attention_cfg=attention_layer_cfg,
            num_attention_heads=10,
            intermediate_size=2048,
            intermediate_activation='relu')

        # Create a 3-dimensional input (the first dimension is implicit).
        data_tensor = tf.keras.Input(shape=(sequence_length, width))
        # Create a 3-dimensional mask input (the first dimension is implicit).
        mask_tensor = tf.keras.Input(shape=(sequence_length,
                                            sequence_length - 3))
        with self.assertRaisesRegex(ValueError,
                                    'When passing a mask tensor.*'):
            _ = test_layer([data_tensor, mask_tensor])
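
The failure is expected: the layer checks the mask's shape against the (batch, from_seq_len, to_seq_len) shape of the attention scores, and the sequence_length - 3 trailing dimension violates that contract, so building the layer graph raises the ValueError matched above.
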
Example #5
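
This example also relies on a ValidatedFeedforwardLayer helper. Again a plausible sketch rather than the test file's exact code: a width-preserving dense block that records its calls the same way the attention helper does.

class ValidatedFeedforwardLayer(tf.keras.layers.Layer):
    """Feedforward block that records every call in a shared list."""

    def __init__(self, call_list, activation, **kwargs):
        super().__init__(**kwargs)
        self.list = call_list
        self.activation = activation

    def build(self, input_shape):
        # Project back to the input width so the residual shapes still match.
        self._dense = tf.keras.layers.Dense(
            input_shape[-1], activation=self.activation, name='feedforward')

    def call(self, inputs):
        self.list.append(True)
        return self._dense(inputs)

    def get_config(self):
        config = super().get_config()
        config.update({'call_list': [], 'activation': self.activation})
        return config
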
    def test_layer_invocation_with_feedforward_cls(self):
        sequence_length = 21
        width = 80

        call_list = []
        attention_layer_cfg = {
            'num_heads': 10,
            'key_dim': 8,
            'call_list': call_list,
        }
        feedforward_call_list = []
        feedforward_layer_cfg = {
            'activation': 'relu',
            'call_list': feedforward_call_list,
        }
        feedforward_layer = ValidatedFeedforwardLayer(**feedforward_layer_cfg)
        test_layer = transformer_scaffold.TransformerScaffold(
            attention_cls=ValidatedAttentionLayer,
            attention_cfg=attention_layer_cfg,
            feedforward_cls=feedforward_layer,
            num_attention_heads=10,
            intermediate_size=None,
            intermediate_activation=None)

        # Create a 3-dimensional input (the first dimension is implicit).
        data_tensor = tf.keras.Input(shape=(sequence_length, width))
        # Create a 3-dimensional mask input (the first dimension is implicit).
        mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length))
        output_tensor = test_layer([data_tensor, mask_tensor])

        # Create a model from the test layer.
        model = tf.keras.Model([data_tensor, mask_tensor], output_tensor)

        # Invoke the model on test data. We can't validate the output data itself
        # (the NN is too complex) but this will rule out structural runtime errors.
        batch_size = 6
        input_data = 10 * np.random.random_sample(
            (batch_size, sequence_length, width))
        # The attention mask should be of shape (batch, from_seq_len, to_seq_len),
        # which here is (batch, sequence_length, sequence_length).
        mask_data = np.random.randint(2,
                                      size=(batch_size, sequence_length,
                                            sequence_length))
        _ = model.predict([input_data, mask_data])
        # If call_list[0] exists and is True, the passed layer class was
        # instantiated from the given config properly.
        self.assertNotEmpty(call_list)
        self.assertTrue(call_list[0],
                        "The passed layer class wasn't instantiated.")
        self.assertNotEmpty(feedforward_call_list)
        self.assertTrue(feedforward_call_list[0],
                        "The passed layer class wasn't instantiated.")