Example #1
0
 def test_TFOpenAIGPTLMHeadModel(self):
     """Convert a randomly-initialized TFOpenAIGPTLMHeadModel to ONNX and
     check that ONNX Runtime reproduces the Keras predictions within
     loosened tolerances.

     Uses a locally pickled tokenizer and a default OpenAIGPTConfig so the
     test does not download pretrained weights.
     """
     from transformers import OpenAIGPTConfig, TFOpenAIGPTLMHeadModel
     # Fresh TF graph/session so layer names don't collide with other tests.
     keras.backend.clear_session()
     tokenizer_file = 'openai_openai-gpt.pickle'
     tokenizer = self._get_tokenzier(tokenizer_file)
     # Only the model inputs are needed here; the raw text is unused.
     _text, inputs, inputs_onnx = self._prepare_inputs(tokenizer)
     config = OpenAIGPTConfig()
     model = TFOpenAIGPTLMHeadModel(config)
     predictions = model.predict(inputs)
     onnx_model = keras2onnx.convert_keras(model, model.name)
     # Looser rtol/atol than the suite default: an untrained LM head emits
     # large logits, so small numeric drift between runtimes is expected.
     self.assertTrue(
         run_onnx_runtime(onnx_model.graph.name, onnx_model, inputs_onnx,
                          predictions, self.model_files,
                          rtol=1.e-2, atol=1.e-4))
Example #2
0
 def test_TFOpenAIGPTLMHeadModel(self):
     """Round-trip the pretrained 'openai-gpt' TFOpenAIGPTLMHeadModel
     through keras2onnx and verify ONNX Runtime matches the Keras
     predictions at the suite's default tolerances.
     """
     from transformers import OpenAIGPTTokenizer, TFOpenAIGPTLMHeadModel
     weights = 'openai-gpt'
     tokenizer = OpenAIGPTTokenizer.from_pretrained(weights)
     # The raw text is not needed; keep the Keras and ONNX input dicts.
     _text, model_inputs, onnx_inputs = self._prepare_inputs(tokenizer)
     model = TFOpenAIGPTLMHeadModel.from_pretrained(weights)
     expected = model.predict(model_inputs)
     converted = keras2onnx.convert_keras(model, model.name)
     matched = run_onnx_runtime(converted.graph.name, converted,
                                onnx_inputs, expected, self.model_files)
     self.assertTrue(matched)