Example #1
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        # Dense transform of each hidden state, followed by a one-unit layer
        # that produces a single prediction logit per position.
        self.dense = tf.keras.layers.Dense(
            config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense")
        self.dense_prediction = tf.keras.layers.Dense(
            1, kernel_initializer=get_initializer(config.initializer_range), name="dense_prediction")
        self.config = config
Example #2
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(*inputs, **kwargs)
        # Two-layer pooler: dense -> tanh -> single logit (no bias on the output layer).
        self.dense_0 = tf.keras.layers.Dense(
            config.hidden_size, kernel_initializer=get_initializer(config.initializer_range),
            name="pooler_answer_class_dense_0"
        )

        self.activation = tf.keras.layers.Activation("tanh")
        self.dense_1 = tf.keras.layers.Dense(
            1, use_bias=False, kernel_initializer=get_initializer(config.initializer_range),
            name="pooler_answer_class_dense_1"
        )
Example #3
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(*inputs, **kwargs)
        self.dense_0 = tf.keras.layers.Dense(
            config.hidden_size, kernel_initializer=get_initializer(config.initializer_range),
            name="end_logit_pooler_dense_0"
        )

        self.activation = tf.keras.layers.Activation("tanh")  # equivalent of PyTorch's nn.Tanh()
        self.LayerNorm = tf.keras.layers.LayerNormalization(axis=-1, epsilon=config.layer_norm_eps,
                                                            name="end_logit_pooler_LayerNorm")
        self.dense_1 = tf.keras.layers.Dense(
            1, kernel_initializer=get_initializer(config.initializer_range), name="end_logit_pooler_dense_1"
        )
Example #4
    def __init__(self, config, **kwargs):
        super().__init__(config, **kwargs)

        # ELECTRA backbone followed by a dropout-regularized classification head.
        self.electra = TFElectraMainLayer(config, name="electra")
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
        self.classifier = tf.keras.layers.Dense(
            config.num_labels, kernel_initializer=get_initializer(config.initializer_range), name="classifier")
Example #5
    def build(self, input_shape):
        """Build the shared word embedding layer."""
        with tf.name_scope("word_embeddings"):
            # Create and initialize the weights. The initializer was chosen
            # arbitrarily and works well in practice.
            self.word_embeddings = self.add_weight(
                "weight",
                shape=[self.vocab_size, self.embedding_size],
                initializer=get_initializer(self.initializer_range),
            )
        super().build(input_shape)
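
The weight built here is typically tied: the same matrix serves as the input lookup table and, transposed, as the output projection onto the vocabulary. A minimal sketch of a matching call method on the same layer (the mode argument and its handling are an illustrative assumption, not shown in the example above):

    def call(self, inputs, mode="embedding"):
        if mode == "embedding":
            # Look up embedding vectors for integer token ids.
            return tf.gather(self.word_embeddings, inputs)
        elif mode == "linear":
            # Project hidden states onto the vocabulary by reusing the same
            # matrix transposed (weight tying); flatten first so the matmul
            # is a plain 2-D product, then restore the leading dimensions.
            batch_shape = tf.shape(inputs)[:-1]
            x = tf.reshape(inputs, [-1, self.embedding_size])
            logits = tf.matmul(x, self.word_embeddings, transpose_b=True)
            return tf.reshape(logits, tf.concat([batch_shape, [self.vocab_size]], axis=0))
        raise ValueError(f"mode {mode} is not valid.")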
Example #6
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.vocab_size = config.vocab_size
        self.embedding_size = config.embedding_size
        self.initializer_range = config.initializer_range

        self.position_embeddings = tf.keras.layers.Embedding(
            config.max_position_embeddings,
            config.embedding_size,
            embeddings_initializer=get_initializer(self.initializer_range),
            name="position_embeddings",
        )
        self.token_type_embeddings = tf.keras.layers.Embedding(
            config.type_vocab_size,
            config.embedding_size,
            embeddings_initializer=get_initializer(self.initializer_range),
            name="token_type_embeddings",
        )

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
Example #7
    def __init__(self, config, shared_embeddings=False, input_embeddings=None, **kwargs):
        super().__init__(config, **kwargs)

        if shared_embeddings and input_embeddings is not None:
            self.embeddings = input_embeddings
        else:
            self.embeddings = TFElectraEmbeddings(config, name="embeddings")

        if config.embedding_size != config.hidden_size:
            self.embeddings_project = tf.keras.layers.Dense(
                config.hidden_size,
                kernel_initializer=get_initializer(config.initializer_range),
                name="embeddings_project")
        self.encoder = TFBertEncoder(config, name="encoder")
        self.config = config
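
The shared_embeddings/input_embeddings pair in this constructor lets two main layers reuse a single embedding table. A hypothetical usage sketch (the generator/discriminator pairing and the variable names are assumptions, not taken from the examples):

# Hypothetical: tie a second tower's embeddings to an existing one.
discriminator = TFElectraMainLayer(config, name="electra")
generator = TFElectraMainLayer(
    config,
    shared_embeddings=True,
    input_embeddings=discriminator.embeddings,  # reuse the same table
    name="generator",
)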
Example #8
    def __init__(self, config, args):
        super().__init__(config, args)

        self.start_n_top = args.beam_size  # config.start_n_top
        self.end_n_top = args.beam_size  # config.end_n_top
        self.joint_head = args.joint_head
        self.v2 = args.version_2_with_negative
        self.electra = TFElectraMainLayer(config, name="electra")
        self.num_hidden_layers = config.num_hidden_layers
        self.amp = config.amp

        # Default (non-joint) QA head: a single dense layer producing the two
        # start/end logits per position.
        if not self.joint_head:
            self.qa_outputs = tf.keras.layers.Dense(
                2, kernel_initializer=get_initializer(config.initializer_range), name="qa_outputs")
        else:
            # Joint head: separate poolers for start and end logits and, for
            # SQuAD v2 (version_2_with_negative), an answerability classifier.
            self.start_logits = TFPoolerStartLogits(config, name="start_logits")
            self.end_logits = TFPoolerEndLogits(config, name="end_logits")
            if self.v2:
                self.answer_class = TFPoolerAnswerClass(config, name="answer_class")
Example #9
    def __init__(self, config, *inputs, **kwargs):
        super().__init__(*inputs, **kwargs)
        self.dense = tf.keras.layers.Dense(
            1, kernel_initializer=get_initializer(config.initializer_range), name="start_logit_pooler_dense"
        )
Example #10
    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)

        self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
        self.dense = tf.keras.layers.Dense(
            config.embedding_size, kernel_initializer=get_initializer(config.initializer_range), name="dense")
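
Every example above initializes its kernels with get_initializer(config.initializer_range). In the HuggingFace Transformers codebase this helper is a thin wrapper around a truncated normal initializer; a sketch consistent with that definition:

import tensorflow as tf


def get_initializer(initializer_range: float = 0.02) -> tf.keras.initializers.TruncatedNormal:
    """Return a truncated normal initializer whose stddev is initializer_range."""
    return tf.keras.initializers.TruncatedNormal(stddev=initializer_range)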