# Example 1: fine-tune a BERT classifier (CLS-token head) on IMDB sentiment.
# Classification head: the hidden state of the first ([CLS]) token is
# projected to a single sigmoid unit for binary sentiment prediction.
output = Lambda(lambda t: t[:, 0], name='CLS-token')(bert.output)
output = Dense(1, activation='sigmoid')(output)
model = Model(bert.input, output)

# Freeze the lower transformer layers; only the top of the stack is tuned.
bt.lock_transformer_layers(bert, 10)

maxlen = 150  # maximum token length per review — TODO confirm tokenizer limit

from imdb import *

# Load a 3000-sample IMDB split and tokenize both text sets for BERT.
(train_text, train_label), (test_text, test_label) = GetImdbData(maxlen, 3000)
train_inputs, test_inputs = [bt.convert_sentences(texts, maxlen)
                             for texts in (train_text, test_text)]

epochs = 3
batch_size = 32
# Total optimizer steps over the whole run; presumably needed by the
# suggested optimizer's learning-rate schedule — verify against bt docs.
total_steps = epochs * train_inputs[0].shape[0] // batch_size
optimizer = bt.get_suggested_optimizer(1e-4, total_steps=total_steps)

model.compile(
    loss='binary_crossentropy',
    optimizer=optimizer,
    metrics=['accuracy'],
)

model.fit(
    train_inputs,
    train_label,
    epochs=epochs,
    batch_size=batch_size,
    shuffle=True,
    validation_data=(test_inputs, test_label),
)

#Epoch 1/3
#94/94 [==============================] - 32s 337ms/step - loss: 0.4417 - accuracy: 0.7877 - val_loss: 0.3099 - val_accuracy: 0.8663
#Epoch 2/3
#94/94 [==============================] - 31s 329ms/step - loss: 0.2525 - accuracy: 0.8873 - val_loss: 0.3133 - val_accuracy: 0.8740
# Example 2: masked global-average-pooling head over the BERT sequence output.
# Build a padding mask (True where the token id is positive) and apply it
# before pooling, so padded positions do not dilute the average.
mask = tf.keras.layers.Lambda(lambda ids: K.greater(ids, 0))(input_word_ids)
seq_output = MyMasking()([seq_output, mask])
pooled_output = tf.keras.layers.GlobalAveragePooling1D(
    data_format='channels_last')(seq_output)

# Small MLP head: ReLU projection, dropout for regularization, sigmoid output.
x = tf.keras.layers.Dense(256, activation='relu')(pooled_output)
x = tf.keras.layers.Dropout(0.5)(x)
x = tf.keras.layers.Dense(1, activation='sigmoid')(x)

model = tf.keras.models.Model(inputs=input_word_ids, outputs=x)

epochs = 4
batch_size = 32
# Total optimizer steps across all epochs; presumably drives the optimizer's
# learning-rate schedule — verify against bt.get_suggested_optimizer docs.
total_steps = epochs * train_inputs.shape[0] // batch_size
optimizer = bt.get_suggested_optimizer(init_lr=1e-4, total_steps=total_steps)

model.compile(
    loss='binary_crossentropy',
    optimizer=optimizer,
    metrics=['accuracy'],
)

model.fit(
    train_inputs,
    train_label,
    epochs=epochs,
    batch_size=batch_size,
    shuffle=True,
    validation_data=(test_inputs, test_label),
)

#Epoch 1/10
#3000/3000 [==============================] - 39s 13ms/sample - loss: 0.5614 - accuracy: 0.7017 - val_loss: 0.3556 - val_accuracy: 0.8417