def test_TFRobertaForMaskedLM(self):
    """Check that TFRobertaForMaskedLM converts to ONNX with matching outputs.

    Loads the pretrained 'roberta-base' masked-LM model, runs a Keras
    prediction, converts the model with keras2onnx, and asserts that the
    ONNX runtime reproduces the Keras predictions within tolerance.
    """
    from transformers import RobertaTokenizer, TFRobertaForMaskedLM

    weights_name = 'roberta-base'
    roberta_tokenizer = RobertaTokenizer.from_pretrained(weights_name)
    # _prepare_inputs returns the raw text plus Keras- and ONNX-formatted inputs.
    text, keras_inputs, ort_inputs = self._prepare_inputs(roberta_tokenizer)

    masked_lm = TFRobertaForMaskedLM.from_pretrained(weights_name)
    expected = masked_lm.predict(keras_inputs)
    converted = keras2onnx.convert_keras(masked_lm, masked_lm.name)

    # Compare ONNX runtime output against the Keras prediction; loose rtol
    # because float32 transformer outputs differ slightly across backends.
    ok = run_onnx_runtime(converted.graph.name,
                          converted,
                          ort_inputs,
                          expected,
                          self.model_files,
                          rtol=1.e-2,
                          atol=1.e-4)
    self.assertTrue(ok)
# Example #2
# Score: 0
def get_pretrained_roberta(pretrained_model):
    """Load a pretrained TFRobertaForMaskedLM for the given model name.

    NOTE: This will be pretrained unlike our analogous method for bert.
    The `roberta.from_pt` helper decides whether the checkpoint must be
    converted from PyTorch weights.
    """
    needs_pt_conversion = roberta.from_pt(pretrained_model)
    return TFRobertaForMaskedLM.from_pretrained(pretrained_model,
                                                from_pt=needs_pt_conversion)