def test_transformer_encoder_save_and_load(tmp_path):
    """Round-trip a Keras model wrapping BertEncoder through save/load.

    Builds a three-input model (int64 sequences of length 500), saves it to
    a temporary directory, and reloads it to verify (de)serialization works.
    """
    encoder = layer_module.BertEncoder()
    # BertEncoder expects three parallel int64 inputs (ids/mask/type ids --
    # assumed from the three identical inputs; confirm against the layer).
    model_inputs = [
        tf.keras.Input(shape=(500,), dtype=tf.int64) for _ in range(3)
    ]
    model = tf.keras.Model(model_inputs, encoder(model_inputs))
    save_path = os.path.join(tmp_path, "model")
    model.save(save_path)
    tf.keras.models.load_model(save_path)
def build(self, hp, inputs=None):
    """Build the BERT block: tokenize the input, encode it, load weights.

    Args:
        hp: HyperParameters instance used to resolve the sequence length.
        inputs: Input node(s); only the first flattened tensor is used.

    Returns:
        The output node of the pretrained BERT encoder.
    """
    source = nest.flatten(inputs)[0]
    # Sequence length may itself be a tunable hyperparameter.
    seq_len = utils.add_to_hp(self.max_sequence_length, hp)
    tokens = keras_layers.BertTokenizer(max_sequence_length=seq_len)(source)
    encoder = keras_layers.BertEncoder()
    encoded = encoder(tokens)
    # Weights can only be loaded after the layer has been built by the
    # call above.
    encoder.load_pretrained_weights()
    return encoded
def build(self, hp, inputs=None):
    """Build the BERT block: tokenize the input, encode it, load weights.

    Args:
        hp: HyperParameters instance; consulted only when no fixed
            ``max_sequence_length`` was configured on this block.
        inputs: Input node(s); only the first flattened tensor is used.

    Returns:
        The output node of the pretrained BERT encoder.
    """
    source = nest.flatten(inputs)[0]
    # Prefer the user-supplied length; otherwise let the tuner choose.
    if self.max_sequence_length:
        seq_len = self.max_sequence_length
    else:
        seq_len = hp.Choice("max_seq_len", [128, 256, 512], default=128)
    tokens = keras_layers.BertTokenizer(max_sequence_length=seq_len)(source)
    encoder = keras_layers.BertEncoder()
    encoded = encoder(tokens)
    # Weights can only be loaded after the layer has been built by the
    # call above.
    encoder.load_pretrained_weights()
    return encoded