Example 1
    def test_lm_generate_openai_gpt(self):
        """Greedy generation from the pretrained openai-gpt checkpoint must
        reproduce a known, fixed token sequence."""
        model = TFOpenAIGPTLMHeadModel.from_pretrained("openai-gpt")
        # Token ids for the prompt "the president is".
        prompt = tf.convert_to_tensor([[481, 4735, 544]], dtype=tf.int32)
        # Decoded: the president is a very good man. " \n " i'm sure he is, " said the
        expected_output_ids = [
            481, 4735, 544, 246, 963,
            870, 762, 239, 244, 40477,
            244, 249, 719, 881, 487,
            544, 240, 244, 603, 481,
        ]

        # do_sample=False => deterministic greedy decoding.
        generated = model.generate(prompt, do_sample=False)
        self.assertListEqual(generated[0].numpy().tolist(), expected_output_ids)
Example 2
 def create_and_check_openai_gpt_lm_head(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
     """Build a TFOpenAIGPTLMHeadModel from `config` and assert that the
     prediction scores have shape (batch_size, seq_length, vocab_size)."""
     model = TFOpenAIGPTLMHeadModel(config=config)
     model_inputs = {
         "input_ids": input_ids,
         "attention_mask": input_mask,
         "token_type_ids": token_type_ids,
     }
     # First output of the LM head model is the prediction-score tensor.
     prediction_scores = model(model_inputs)[0]
     result = {"prediction_scores": prediction_scores.numpy()}
     expected_shape = [self.batch_size, self.seq_length, self.vocab_size]
     self.parent.assertListEqual(list(result["prediction_scores"].shape), expected_shape)
Example 3
 def create_and_check_openai_gpt_lm_head(self, config, input_ids,
                                         input_mask, head_mask,
                                         token_type_ids, *args):
     """Instantiate the LM-head model and verify that the logits come out
     with shape (batch_size, seq_length, vocab_size)."""
     lm_model = TFOpenAIGPTLMHeadModel(config=config)
     outputs = lm_model({
         "input_ids": input_ids,
         "attention_mask": input_mask,
         "token_type_ids": token_type_ids,
     })
     expected_shape = (self.batch_size, self.seq_length, self.vocab_size)
     self.parent.assertEqual(outputs.logits.shape, expected_shape)
def initialize():
    """Load the pretrained openai-gpt model and tokenizer into the
    module-level globals `model` and `tokenizer`."""
    global model, tokenizer
    # Both loads pull the "openai-gpt" checkpoint from the HuggingFace hub.
    tokenizer = OpenAIGPTTokenizer.from_pretrained("openai-gpt")
    model = TFOpenAIGPTLMHeadModel.from_pretrained("openai-gpt")