Example #1
# Module-level context (assumed): `import keras2onnx`, a Keras handle such as
# `from tensorflow.python import keras`, and `run_onnx_runtime` from the
# keras2onnx test utilities.
def test_TFRobertaForMaskedLM(self):
    from transformers import RobertaConfig, TFRobertaForMaskedLM
    keras.backend.clear_session()
    # pretrained_weights = 'roberta-base'
    tokenizer_file = 'roberta_roberta-base.pickle'
    tokenizer = self._get_tokenzier(tokenizer_file)  # test-suite helper (name spelled as in the suite); loads a pickled tokenizer
    text, inputs, inputs_onnx = self._prepare_inputs(tokenizer)
    config = RobertaConfig()
    model = TFRobertaForMaskedLM(config)  # randomly initialized weights; only the graph matters for conversion
    predictions = model.predict(inputs)
    onnx_model = keras2onnx.convert_keras(model, model.name)
    self.assertTrue(
        run_onnx_runtime(onnx_model.graph.name, onnx_model, inputs_onnx, predictions,
                         self.model_files, rtol=1.e-2, atol=1.e-4))
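Example #1 relies on a test-suite helper, `_get_tokenzier` (spelled as in the suite), that is not shown on this page. A minimal sketch of what such a helper could look like, assuming the tokenizer was pickled to disk ahead of time (the body below is an illustration, not the real implementation):

import pickle

def _get_tokenzier(self, tokenizer_file):
    # Hypothetical sketch: load a tokenizer pickled in advance so the test
    # runs without downloading anything. The path handling is an assumption.
    with open(tokenizer_file, 'rb') as f:
        return pickle.load(f)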
Example #2
# Same module-level context as Example #1.
def test_TFRobertaForMaskedLM(self):
    from transformers import RobertaTokenizer, TFRobertaForMaskedLM
    pretrained_weights = 'roberta-base'
    tokenizer = RobertaTokenizer.from_pretrained(pretrained_weights)
    text, inputs, inputs_onnx = self._prepare_inputs(tokenizer)
    model = TFRobertaForMaskedLM.from_pretrained(pretrained_weights)  # downloads pretrained weights on first use
    predictions = model.predict(inputs)
    onnx_model = keras2onnx.convert_keras(model, model.name)
    self.assertTrue(
        run_onnx_runtime(onnx_model.graph.name, onnx_model, inputs_onnx, predictions,
                         self.model_files, rtol=1.e-2, atol=1.e-4))
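Both tests delegate the accuracy check to `run_onnx_runtime`, a keras2onnx test utility. A simplified sketch of what such a check does, assuming onnxruntime is installed (the function name and signature below are illustrative, not the real helper's):

import numpy as np
import onnxruntime as ort

def check_onnx_outputs(onnx_model, inputs_onnx, expected, rtol=1e-2, atol=1e-4):
    # Serialize the in-memory ONNX model, run it with onnxruntime, and
    # compare each output against the Keras predictions within tolerance.
    sess = ort.InferenceSession(onnx_model.SerializeToString(),
                                providers=['CPUExecutionProvider'])
    actual = sess.run(None, inputs_onnx)
    expected = expected if isinstance(expected, (list, tuple)) else [expected]
    return all(np.allclose(e, a, rtol=rtol, atol=atol)
               for e, a in zip(expected, actual))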
Example #3
from transformers import TFRobertaForMaskedLM  # import assumed at module level

def get_pretrained_roberta(pretrained_model):
    # NOTE: Unlike our analogous method for BERT, this returns a pretrained model.
    # `roberta` is a project-local module; `roberta.from_pt` presumably reports
    # whether the checkpoint has to be loaded from a PyTorch state dict.
    return TFRobertaForMaskedLM.from_pretrained(
        pretrained_model, from_pt=roberta.from_pt(pretrained_model))
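A usage sketch for Example #3, assuming a recent transformers version whose TF model outputs expose `.logits` and a standard RoBERTa tokenizer; everything below other than `get_pretrained_roberta` is illustrative:

import tensorflow as tf
from transformers import RobertaTokenizer

model = get_pretrained_roberta('roberta-base')
tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
inputs = tokenizer("The capital of France is <mask>.", return_tensors='tf')
logits = model(inputs).logits  # shape: (batch, seq_len, vocab_size)
mask_pos = int(tf.where(inputs['input_ids'][0] == tokenizer.mask_token_id)[0, 0])
predicted_id = int(tf.argmax(logits[0, mask_pos]))
print(tokenizer.decode([predicted_id]))  # a pretrained checkpoint should print " Paris"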