def _construct(self, **kwargs):
    """Create the two BERT sub-layers: embeddings and the transformer encoder stack."""
    super()._construct(**kwargs)
    params = self.params
    # embedding lookup sub-layer
    self.embeddings_layer = BertEmbeddingsLayer.from_params(params, name="embeddings")
    # stacked transformer encoder sub-layer
    self.encoders_layer = TransformerEncoderLayer.from_params(params, name="encoder")
    # let keras propagate the mask computed by the embeddings layer
    self.support_masking = True
def build(self, input_shape):
    """Set the layer's input spec from the given shape(s).

    Args:
        input_shape: either a single shape tuple (input_ids only) or a
            two-element list ``[input_ids_shape, token_type_ids_shape]``.
    """
    if isinstance(input_shape, list):
        assert len(input_shape) == 2
        input_ids_shape, token_type_ids_shape = input_shape
        self.input_spec = [
            keras.layers.InputSpec(shape=input_ids_shape),
            keras.layers.InputSpec(shape=token_type_ids_shape)
        ]
    else:
        input_ids_shape = input_shape
        self.input_spec = keras.layers.InputSpec(shape=input_ids_shape)
    # NOTE: the embeddings/encoder sub-layers are already created in
    # _construct(); re-creating them here would silently discard those
    # instances (breaking weight tracking and checkpoint mapping), so
    # build() only records the input spec.
    super(BertModelLayer, self).build(input_shape)
def test_embeddings_layer():
    """Smoke-test mask computation on a fresh BertEmbeddingsLayer."""
    tf.get_logger().setLevel(logging.INFO)
    embeddings = BertEmbeddingsLayer()
    token_ids = np.array([1, 2, 0, 0, 1])
    mask = embeddings.compute_mask(inputs=token_ids)
    scaled_mask = tf.cast(mask, tf.float32) * 0.1
    print('mask: {}'.format(scaled_mask))