Example no. 1
0
 def test_3layer_gpt2(self):
     """Convert a 3-layer TFGPT2Model to ONNX and verify runtime parity.

     Builds a small GPT-2 (3 layers to keep the test fast), runs a Keras
     prediction on tokenized sample text, converts the model with
     keras2onnx, and asserts the ONNX runtime reproduces the prediction.
     """
     from transformers import GPT2Config, TFGPT2Model, BertTokenizer
     # Inference mode: freeze training-only behaviour before tracing.
     keras2onnx.proto.keras.backend.set_learning_phase(0)
     config = GPT2Config(n_layer=3)
     model = TFGPT2Model(config)
     # NOTE(review): a BERT tokenizer is paired with a GPT-2 model here;
     # the vocabularies differ — presumably only input shapes matter for
     # the conversion check. Confirm if exact token ids are significant.
     tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
     # Fix: the original unpacked `inputs` here and immediately overwrote
     # it on the next line — discard the dead value instead.
     text, _, inputs_onnx = self._prepare_inputs(tokenizer)
     inputs = tokenizer.encode_plus(text, add_special_tokens=True, return_tensors='tf')
     predictions = model.predict(inputs)
     onnx_model = keras2onnx.convert_keras(model, model.name)
     self.assertTrue(run_onnx_runtime(onnx_model.graph.name, onnx_model, inputs_onnx, predictions, self.model_files))
Example no. 2
0
    def __init__(self, num_units, output_embed_num_units):
        """Build a GPT-2 backbone with dense input/output projections.

        Args:
            num_units: Width of the transformer (``n_embd``) and of the
                input projection layer.
            output_embed_num_units: Width of the output projection layer.
        """
        super().__init__()
        # vocab_size=1: inputs are projected embeddings, not token ids —
        # presumably the built-in token embedding table goes unused.
        # TODO(review): confirm against the caller.
        self.config = GPT2Config(
            vocab_size=1,
            n_positions=1024,
            n_ctx=1024,
            n_embd=num_units,
            n_layer=6,
            n_head=8,
        )

        def _make_idx_embedding():
            # Both index embeddings share the same shape and initializer.
            return tf.keras.layers.Embedding(
                MAX_NUM_TOKENS,
                self.config.n_embd,
                embeddings_initializer=modeling_tf_utils.get_initializer(
                    self.config.initializer_range),
            )

        # Layer creation order matches the original (affects Keras
        # variable naming/tracking).
        self.input_embedding = tf.keras.layers.Dense(num_units)
        self.transformer = TFGPT2Model(self.config)
        self.output_embedding = tf.keras.layers.Dense(output_embed_num_units)
        self.text_idx_embedding = _make_idx_embedding()
        self.obj_idx_embedding = _make_idx_embedding()