Example #1
0
    def test_multi_doc_decoder(self):
        """Builds a multi-document NHNet decoder model and runs a forward pass.

        Wires symbolic Keras inputs (encoder ids, target ids, per-doc encoder
        outputs, doc attention probabilities) through `decoder.Decoder`, then
        checks the trainable-weight count and the decoder output shape on a
        zero-filled batch of 2.
        """
        self._config = utils.get_test_params(cls=configs.NHNetConfig)
        seq_length = 10
        num_docs = 5
        # Derive the width from the config so the fixtures below cannot drift
        # from the shapes the model was actually built with (previously the
        # forward-path arrays and the final assertion hard-coded 16).
        hidden_size = self._config.hidden_size
        encoder_input_ids = tf.keras.layers.Input(shape=(num_docs, seq_length),
                                                  name="encoder_input_ids",
                                                  dtype=tf.int32)
        target_ids = tf.keras.layers.Input(shape=(seq_length,),
                                           name="target_ids",
                                           dtype=tf.int32)
        encoder_outputs = tf.keras.layers.Input(
            shape=(num_docs, seq_length, hidden_size),
            name="all_encoder_outputs",
            dtype=tf.float32)
        embedding_lookup = layers.OnDeviceEmbedding(
            vocab_size=self._config.vocab_size,
            embedding_width=hidden_size,
            initializer=tf.keras.initializers.TruncatedNormal(
                stddev=self._config.initializer_range),
            name="word_embeddings")
        doc_attention_probs = tf.keras.layers.Input(
            shape=(self._config.num_decoder_attn_heads, seq_length, num_docs),
            name="doc_attention_probs",
            dtype=tf.float32)
        # Attention biases are computed from the (symbolic) id tensors:
        # padding-based bias for cross attention, causal bias for self
        # attention.
        cross_attention_bias = decoder.AttentionBias(
            bias_type="multi_cross")(encoder_input_ids)
        self_attention_bias = decoder.AttentionBias(
            bias_type="decoder_self")(target_ids)

        inputs = dict(attention_bias=cross_attention_bias,
                      self_attention_bias=self_attention_bias,
                      target_ids=target_ids,
                      all_encoder_outputs=encoder_outputs,
                      doc_attention_probs=doc_attention_probs)

        decoder_layer = decoder.Decoder(self._config, embedding_lookup)
        outputs = decoder_layer(inputs)
        model_inputs = dict(encoder_input_ids=encoder_input_ids,
                            target_ids=target_ids,
                            all_encoder_outputs=encoder_outputs,
                            doc_attention_probs=doc_attention_probs)
        model = tf.keras.Model(inputs=model_inputs,
                               outputs=outputs,
                               name="test")
        self.assertLen(decoder_layer.trainable_weights, 30)
        # Forward path on a zero-filled batch of 2; shapes mirror the symbolic
        # inputs above.
        fake_inputs = {
            "encoder_input_ids":
            np.zeros((2, num_docs, seq_length), dtype=np.int32),
            "target_ids":
            np.zeros((2, seq_length), dtype=np.int32),
            "all_encoder_outputs":
            np.zeros((2, num_docs, seq_length, hidden_size), dtype=np.float32),
            "doc_attention_probs":
            np.zeros(
                (2, self._config.num_decoder_attn_heads, seq_length, num_docs),
                dtype=np.float32)
        }
        output_tensor = model(fake_inputs)
        self.assertEqual(output_tensor.shape, (2, seq_length, hidden_size))
 def setUp(self):
     """Prepares a test config with trainer-specific overrides."""
     super().setUp()
     self._config = utils.get_test_params()
     overrides = {
         "vocab_size": 49911,
         "max_position_embeddings": 200,
         "len_title": 15,
         "len_passage": 20,
         "beam_size": 5,
         "alpha": 0.6,
         "learning_rate": 0.0,
         "learning_rate_warmup_steps": 0,
         "multi_channel_cross_attention": True,
         "passage_list": ["a", "b"],
     }
     # Non-strict so keys absent from the base test params may be added.
     self._config.override(overrides, is_strict=False)
Example #3
0
 def setUp(self):
     """Loads the shared test parameters for decoder tests."""
     super().setUp()
     self._config = utils.get_test_params()
Example #4
0
 def setUp(self):
     """Loads the shared test parameters for Bert2Bert tests."""
     super().setUp()
     self._config = utils.get_test_params()
Example #5
0
 def setUp(self):
     """Builds NHNet and BERT2BERT configs overridden with the test params."""
     super().setUp()
     # Each config gets its own freshly materialized params dict.
     self._nhnet_config = configs.NHNetConfig()
     self._nhnet_config.override(utils.get_test_params().as_dict())
     self._bert2bert_config = configs.BERT2BERTConfig()
     self._bert2bert_config.override(utils.get_test_params().as_dict())