Example #1
    def test_lm_generate_distilgpt2(self):
        model = TFGPT2LMHeadModel.from_pretrained("distilgpt2")
        input_ids = tf.convert_to_tensor([[464, 1893]],
                                         dtype=tf.int32)  # The president
        expected_output_ids = [
            464, 1893, 286, 262, 1578, 1829, 11, 290, 262, 1893,
            286, 262, 1578, 7526, 11, 423, 587, 287, 262, 2635,
        ]  # The president of the United States, and the president of the United Kingdom, have been in the White

        output_ids = model.generate(input_ids, do_sample=False)
        self.assertListEqual(output_ids[0].numpy().tolist(),
                             expected_output_ids)
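The trailing comment can be sanity-checked by decoding the expected ids with the matching tokenizer. A minimal sketch, assuming only the standard transformers tokenizer API (the ids are copied from the test above; `tf` and `TFGPT2LMHeadModel` imports are likewise assumed in the test file itself):

from transformers import GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("distilgpt2")
ids = [464, 1893, 286, 262, 1578, 1829, 11, 290, 262, 1893,
       286, 262, 1578, 7526, 11, 423, 587, 287, 262, 2635]
# Decodes to: "The president of the United States, and the president of the
# United Kingdom, have been in the White"
print(tokenizer.decode(ids))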
Example #2
    def test_lm_generate_gpt2(self):
        model = TFGPT2LMHeadModel.from_pretrained("gpt2")
        input_ids = tf.convert_to_tensor([[464, 3290]],
                                         dtype=tf.int32)  # The dog
        expected_output_ids = [
            464, 3290, 373, 1043, 287, 257, 2214, 1474, 262, 16246,
            286, 2688, 290, 2688, 27262, 13, 198, 198, 464, 3290,
        ]  # The dog was found in a field near the intersection of West and West Streets.\n\nThe dog
        output_ids = model.generate(input_ids, do_sample=False)
        self.assertListEqual(output_ids[0].numpy().tolist(),
                             expected_output_ids)
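Both tests call `generate` with `do_sample=False`, i.e. greedy decoding, and compare against a 20-token continuation; the tests leave the generation length at the library default, which matches the length of the lists above. A minimal sketch that makes the length explicit (the `max_length=20` argument is an assumption added for clarity, not part of the original test):

import tensorflow as tf
from transformers import TFGPT2LMHeadModel

model = TFGPT2LMHeadModel.from_pretrained("gpt2")
input_ids = tf.convert_to_tensor([[464, 3290]], dtype=tf.int32)  # "The dog"
# Greedy decoding; max_length is spelled out here, assumed to match the
# default the original test relies on.
output_ids = model.generate(input_ids, do_sample=False, max_length=20)
print(output_ids[0].numpy().tolist())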
Example #3
    def create_and_check_gpt2_lm_head(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
        model = TFGPT2LMHeadModel(config=config)
        inputs = {
            "input_ids": input_ids,
            "attention_mask": input_mask,
            "token_type_ids": token_type_ids,
        }
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
Example #4
    def create_and_check_gpt2_lm_head(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
        model = TFGPT2LMHeadModel(config=config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask, "token_type_ids": token_type_ids}
        prediction_scores = model(inputs)[0]
        result = {
            "prediction_scores": prediction_scores.numpy(),
        }
        self.parent.assertListEqual(
            list(result["prediction_scores"].shape), [self.batch_size, self.seq_length, self.vocab_size]
        )
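Both `create_and_check_gpt2_lm_head` variants receive `config` and the input tensors from a surrounding model-tester class, so they are not runnable on their own. A self-contained sketch of an equivalent check, using hypothetical small hyperparameters rather than the values of the real test suite:

import tensorflow as tf
from transformers import GPT2Config, TFGPT2LMHeadModel

# Hypothetical tiny configuration; the real ModelTester supplies its own values.
config = GPT2Config(vocab_size=99, n_positions=32, n_embd=32, n_layer=2, n_head=4)
model = TFGPT2LMHeadModel(config=config)

batch_size, seq_length = 2, 7
inputs = {
    "input_ids": tf.random.uniform((batch_size, seq_length), maxval=config.vocab_size, dtype=tf.int32),
    "attention_mask": tf.ones((batch_size, seq_length), dtype=tf.int32),
    "token_type_ids": tf.zeros((batch_size, seq_length), dtype=tf.int32),
}
result = model(inputs)
assert result.logits.shape == (batch_size, seq_length, config.vocab_size)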
Example #5
        # Tail of an output-formatting method overridden in FixedDataset (assumed
        # to be a datasets.Dataset subclass); only the final branches are shown.
            else:
                output_dict = outputs
        else:
            output_dict = {}
            for k, v in outputs.items():
                if format_columns is not None and k not in format_columns and not output_all_columns:
                    continue
                if format_columns is None or k in format_columns:
                    v = map_nested(command, v, **map_nested_kwargs)
                output_dict[k] = v
        return output_dict


# Assumed imports for the script below (not shown in the original snippet):
#   import tensorflow as tf
#   from transformers import TFGPT2LMHeadModel
#   from transformers.file_utils import TF2_WEIGHTS_NAME, hf_bucket_url, cached_path

# Load a pre-built Arrow dataset with the custom formatting class and force a
# fixed [2048] shape for the TensorFlow-formatted 'input_ids' column.
ds = FixedDataset.from_file('../WRITTEN/dataset.arrow')
ds.set_format(type='tensorflow', columns=['input_ids'], shape=[2048])

# Mirror the model across two GPUs for synchronous data-parallel training.
mirrored_strategy = tf.distribute.MirroredStrategy(
    devices=["/gpu:0", "/gpu:1"])
with mirrored_strategy.scope():
    config_name = 'gpt2'
    model = TFGPT2LMHeadModel.from_pretrained(config_name)
    # Fetch the pretrained TF2 checkpoint again and load it by layer name.
    gpt2_weights_file_url = hf_bucket_url(config_name,
                                          filename=TF2_WEIGHTS_NAME)
    gpt2_weights_file = cached_path(gpt2_weights_file_url)
    model.load_weights(gpt2_weights_file, by_name=True)
    optimizer = tf.keras.optimizers.Adam(learning_rate=3e-5)
    loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    model.compile(optimizer=optimizer, loss=loss)
    model.fit(tf.data.Dataset.from_tensor_slices(ds['input_ids']),
              epochs=2,
              steps_per_epoch=115)
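One caveat with the snippet above: `SparseCategoricalCrossentropy` needs target token ids, but the dataset passed to `fit` yields only `input_ids`. A minimal sketch of the usual causal-LM pairing, continuing from the objects defined above, where labels are the inputs shifted by one token (an assumption about the intended training setup, not part of the original snippet):

def to_lm_pair(ids):
    # GPT-2's position embeddings cover 1024 tokens, so clip the sequence,
    # then predict token t+1 from tokens up to t.
    ids = ids[:1024]
    return ids[:-1], ids[1:]

train_ds = (tf.data.Dataset.from_tensor_slices(ds['input_ids'])
            .map(to_lm_pair)
            .batch(2))
model.fit(train_ds, epochs=2, steps_per_epoch=115)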