Code Example #1
    def create_and_check_bert_model(self, config, input_ids, token_type_ids,
                                    input_mask, sequence_labels, token_labels,
                                    choice_labels):
        model = TFBertModel(config=config)
        # Inputs can be passed as a dict of named tensors...
        inputs = {
            "input_ids": input_ids,
            "attention_mask": input_mask,
            "token_type_ids": token_type_ids
        }
        sequence_output, pooled_output = model(inputs)

        # ...as a list in positional order (input_ids, attention_mask)...
        inputs = [input_ids, input_mask]
        sequence_output, pooled_output = model(inputs)

        # ...or as a single tensor of input_ids.
        sequence_output, pooled_output = model(input_ids)

        result = {
            "sequence_output": sequence_output.numpy(),
            "pooled_output": pooled_output.numpy(),
        }
        self.parent.assertListEqual(
            list(result["sequence_output"].shape),
            [self.batch_size, self.seq_length, self.hidden_size])
        self.parent.assertListEqual(list(result["pooled_output"].shape),
                                    [self.batch_size, self.hidden_size])
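
The tester above receives its config and tensors from elsewhere in the test suite. As a rough illustration only, the arguments could be built along these lines; the sizes are placeholders and tf.random.uniform stands in for the test suite's own helpers:

import tensorflow as tf
from transformers import BertConfig

# Placeholder sizes for illustration only.
batch_size, seq_length = 13, 7
config = BertConfig(
    vocab_size=99,
    hidden_size=32,
    num_hidden_layers=5,
    num_attention_heads=4,
    intermediate_size=37,
)
input_ids = tf.random.uniform((batch_size, seq_length), maxval=config.vocab_size, dtype=tf.int32)
input_mask = tf.ones((batch_size, seq_length), dtype=tf.int32)
token_type_ids = tf.zeros((batch_size, seq_length), dtype=tf.int32)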
Code Example #2
 def test_model_from_pretrained(self):
     cache_dir = "/tmp/transformers_test/"
     # for model_name in list(TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
     for model_name in ['bert-base-uncased']:
         model = TFBertModel.from_pretrained(model_name, cache_dir=cache_dir)
         shutil.rmtree(cache_dir)
         self.assertIsNotNone(model)
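
The hard-coded /tmp path and the manual shutil.rmtree call above can be avoided by letting the standard library manage the cache directory. A minimal variant sketch (the method name is hypothetical) would be:

 def test_model_from_pretrained_tempdir(self):
     # Hypothetical variant sketch: let tempfile manage the cache directory
     # instead of deleting it by hand with shutil.rmtree.
     import tempfile
     with tempfile.TemporaryDirectory() as cache_dir:
         model = TFBertModel.from_pretrained("bert-base-uncased", cache_dir=cache_dir)
         self.assertIsNotNone(model)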
Code Example #3
    def create_and_check_bert_model(
        self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        model = TFBertModel(config=config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask, "token_type_ids": token_type_ids}
        result = model(inputs)

        inputs = [input_ids, input_mask]
        result = model(inputs)

        result = model(input_ids)

        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
        self.parent.assertEqual(result.pooler_output.shape, (self.batch_size, self.hidden_size))
Code Example #4
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(config, *inputs, **kwargs)

        self.num_layers = config.num_labels
        self.backbone = TFBertModel(config,
                                    *inputs,
                                    **kwargs,
                                    name="bert_backbone")

        self.dropout = tf.keras.layers.Dropout(0.2)
        self.dropout_multisampled = tf.keras.layers.Dropout(0.5)

        # User-defined layer (definition not shown in this snippet).
        self.weighted_sum = WeightedSumLayer(config.num_hidden_layers)

        self.classifier = tf.keras.layers.Dense(
            config.num_labels,
            kernel_initializer=get_initializer(config.initializer_range),
            name="classifier")

        self.backbone.bert.pooler._trainable = False  # freeze the pooler's weights
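
Only the constructor is shown above; the matching call method is not part of the example. The sketch below is one way such a head could consume the backbone. It assumes WeightedSumLayer accepts a list of per-layer tensors and that the backbone is run with output_hidden_states=True; neither assumption comes from the original code, and the dropout_multisampled layer is not exercised here.

    def call(self, inputs, training=False):
        # Sketch only: run the backbone with output_hidden_states=True so the
        # per-layer hidden states are available to the weighted sum (assumption).
        outputs = self.backbone(inputs, output_hidden_states=True, training=training)
        # Take the [CLS] position from every encoder layer; index 0 of
        # hidden_states is the embedding output, so it is skipped.
        cls_states = [layer_output[:, 0, :] for layer_output in outputs.hidden_states[1:]]
        pooled = self.weighted_sum(cls_states)
        pooled = self.dropout(pooled, training=training)
        return self.classifier(pooled)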
Code Example #5
 def test_model_from_pretrained(self):
     # for model_name in TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
     for model_name in ["bert-base-uncased"]:
         model = TFBertModel.from_pretrained(model_name)
         self.assertIsNotNone(model)
Code Example #6
 def test_model_from_pretrained(self):
     # for model_name in list(TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
     for model_name in ["bert-base-uncased"]:
         model = TFBertModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
         self.assertIsNotNone(model)
Code Example #7
 def test_model_from_pretrained(self):
     model = TFBertModel.from_pretrained("jplu/tiny-tf-bert-random")
     self.assertIsNotNone(model)
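
The tests above only check that loading succeeds. For completeness, a minimal end-to-end usage sketch with a recent transformers version (4.x-style output objects; the input sentence is a placeholder) looks like this:

from transformers import BertTokenizer, TFBertModel

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = TFBertModel.from_pretrained("bert-base-uncased")

# Tokenize a placeholder sentence and run a forward pass.
inputs = tokenizer("Hello, world!", return_tensors="tf")
outputs = model(inputs)

# (batch_size, seq_length, hidden_size) and (batch_size, hidden_size)
print(outputs.last_hidden_state.shape, outputs.pooler_output.shape)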