Example no. 1
history = model.fit(x_train,
                    y_train,
                    validation_data=(x_val, y_val),
                    batch_size=512,
                    epochs=1000,
                    shuffle=True,
                    callbacks=[es, mc])
print('trained embedding shape:', model.layers[0].get_weights()[0].shape)
utils.save_embs_2_file(model, 0, tokenizer.word_index)

# test_loss, test_acc = model.evaluate(x_test, y_test)
# print('test loss:', test_loss, 'test acc:', test_acc)
gold_en = y_test
predicted_en = model.predict(x_test).argmax(axis=1)
gold_de = y_test_de
predicted_de = model.predict(x_test_de).argmax(axis=1)

utils.test_evaluation(gold_en, predicted_en, gold_de, predicted_de)

# de fine-tuning
FINETUNE = False
if FINETUNE:
    print('performing classical fine-tuning...')
    print('train:', de_train_dir)
    print('dev:', de_dev_dir)
    model2 = models.load_model('best_model.h5', compile=False)
    # model2.layers[0].trainable = True
    # use the configured Adam instance; passing the string 'adam' would ignore the custom learning rate
    adam = optimizers.Adam(learning_rate=0.0005)
    model2.compile(optimizer=adam,
                   loss='sparse_categorical_crossentropy',
                   metrics=['acc'])
    print(model2.summary())
    print(K.eval(model2.optimizer.lr))
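The `es` and `mc` callbacks and the `utils.test_evaluation` helper used above are defined outside this excerpt. Below is a minimal sketch of plausible implementations, assuming a standard Keras EarlyStopping/ModelCheckpoint pair and micro/macro F1 scoring with scikit-learn; the monitored metric, patience value and return order are assumptions (only the 'best_model.h5' file name appears in the excerpt).

from keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.metrics import f1_score

# Assumed callback setup: stop once validation loss stalls and keep the best
# weights on disk, which is what load_model('best_model.h5') above relies on.
es = EarlyStopping(monitor='val_loss', patience=10)
mc = ModelCheckpoint('best_model.h5', monitor='val_loss', save_best_only=True)

def test_evaluation(gold_en, predicted_en, gold_de=None, predicted_de=None):
    """Hypothetical stand-in for utils.test_evaluation: print and return
    micro/macro F1 for the English test set and, when given, for the German one."""
    en_mic = f1_score(gold_en, predicted_en, average='micro')
    en_mac = f1_score(gold_en, predicted_en, average='macro')
    print('EN micro-F1:', en_mic, 'EN macro-F1:', en_mac)
    if gold_de is None:
        return en_mic, en_mac
    de_mic = f1_score(gold_de, predicted_de, average='micro')
    de_mac = f1_score(gold_de, predicted_de, average='macro')
    print('DE micro-F1:', de_mic, 'DE macro-F1:', de_mac)
    return en_mic, de_mic, en_mac, de_mac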
Example no. 2
                         save_best_only=True)
    model2.fit(x_train,
               y_train,
               validation_data=(x_val, y_val),
               batch_size=64,
               epochs=100,
               shuffle=True,
               callbacks=[es, mc])
    print('trained embedding shape:', model2.layers[0].get_weights()[0].shape)

    gold_en = y_test
    predicted_en = model2.predict(x_test).argmax(axis=1)
    gold_de = y_test_de
    predicted_de = model2.predict(x_test_de).argmax(axis=1)

    en_mic, de_mic, en_mac, de_mac = utils.test_evaluation(
        gold_en, predicted_en, gold_de, predicted_de)
    global_en_mic_train += en_mic
    global_de_mic_train += de_mic
    global_en_mac_train += en_mac
    global_de_mac_train += de_mac

    # de fine-tuning
    FINETUNE = True
    if FINETUNE:
        print('performing classical fine-tuning...')
        print('train:', de_train_dir)
        print('dev:', de_dev_dir)
        print('fine-tuning architecture...')
        model3 = models.load_model('best_model.h5', compile=False)
        model3.layers[0].trainable = False  # freeze EmbLayer
        for layer in model3.layers[1:]:
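Example no. 2 breaks off inside the layer loop. The following is a sketch of how this kind of fine-tuning block typically continues: the embedding stays frozen, the remaining layers are kept trainable, the model is recompiled with a small learning rate and fit on the German split. The German variable names (x_train_de, y_train_de, x_val_de, y_val_de), the learning rate and the epoch count are assumptions, not part of the original code.

        for layer in model3.layers[1:]:
            layer.trainable = True  # everything except the embedding stays trainable
        # Recompile with a small learning rate so fine-tuning on German data does
        # not overwrite what was learned on English (the value is an assumption).
        model3.compile(optimizer=optimizers.Adam(learning_rate=1e-4),
                       loss='sparse_categorical_crossentropy',
                       metrics=['acc'])
        model3.fit(x_train_de, y_train_de,
                   validation_data=(x_val_de, y_val_de),
                   batch_size=64,
                   epochs=50,
                   shuffle=True,
                   callbacks=[es, mc])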
Example no. 3
                     lr_d=1e-10,
                     units=128,
                     spatial_dr=0.5,
                     kernel_size1=4,
                     kernel_size2=4,
                     dense_units=64,
                     dr=0.2,
                     conv_size=32)

# test_loss, test_acc = model.evaluate(x_test, y_test)
# print('test loss:', test_loss, 'test acc:', test_acc)
gold = y_test
predicted = model.predict(x_test).argmax(axis=1)
gold2 = y_test2
predicted2 = model.predict(x_test2).argmax(axis=1)
utils.test_evaluation(gold, predicted)
utils.test_evaluation(gold2, predicted2)

# toy tests
toy_sents = tokenizer.texts_to_sequences([
    'the cat sat on the mat', 'what a great movie', 'better not again',
    'terrible, worst ever', 'best film ever', 'today is Tuesday'
])
toy_data = pad_sequences(toy_sents, maxlen=MAXLEN)
toy_gold = [1, 2, 0, 0, 2, 1]
prediction = model.predict(toy_data)
print(toy_gold)
print(prediction.argmax(axis=1))

# plot results
# utils.plot(history)
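`utils.plot(history)` is commented out and its implementation is not part of the excerpt. A minimal sketch of such a helper, assuming it receives the Keras History object returned by fit and that matplotlib is available:

import matplotlib.pyplot as plt

def plot(history):
    """Hypothetical stand-in for utils.plot: draw training/validation loss and
    accuracy curves from a Keras History object (metric key 'acc' as above)."""
    fig, (ax_loss, ax_acc) = plt.subplots(1, 2, figsize=(10, 4))
    ax_loss.plot(history.history['loss'], label='train')
    ax_loss.plot(history.history['val_loss'], label='validation')
    ax_loss.set_title('loss')
    ax_loss.legend()
    ax_acc.plot(history.history['acc'], label='train')
    ax_acc.plot(history.history['val_acc'], label='validation')
    ax_acc.set_title('accuracy')
    ax_acc.legend()
    plt.show()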