Example #1
# -*- coding: utf-8 -*-
"""
@author: mwahdan
"""

from dialognlu import TransformerNLU
from dialognlu.readers.goo_format_reader import Reader


# model_path = "../saved_models/joint_distilbert_model"
model_path = "../saved_models/joint_trans_xlnet_model"

print("Loading model ...")
nlu = TransformerNLU.load(model_path)

print("Loading dataset ...")
test_path = "../data/snips/test"
test_dataset = Reader.read(test_path)

print("Evaluating model ...")
token_f1_score, tag_f1_score, report, acc = nlu.evaluate(test_dataset)

print('Slot Classification Report:', report)
print('Slot token f1_score = %f' % token_f1_score)
print('Slot tag f1_score = %f' % tag_f1_score)
print('Intent accuracy = %f' % acc)
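A model loaded this way can also serve single utterances directly. A minimal sketch following the usage shown in the dialog-nlu README (the utterance is illustrative):

utterance = "add sabrina salerno to the grime instrumentals playlist"
result = nlu.predict(utterance)  # predicted intent and slot values
print(result)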
Example #2
from dialognlu import TransformerNLU
from dialognlu.readers.goo_format_reader import Reader
import time


num_process = 2  # number of worker processes used for quantized inference


# model_path = "../saved_models/joint_distilbert_model"
# model_path = "../saved_models/joint_trans_bert_model"
# model_path = "../saved_models/joint_trans_albert_model"
# model_path = "../saved_models/joint_trans_roberta_model"
model_path = "../saved_models/joint_trans_xlnet_model"


print("Loading model ...")
nlu = TransformerNLU.load(model_path, quantized=True, num_process=num_process)

print("Loading dataset ...")
test_path = "../data/snips/test"
test_dataset = Reader.read(test_path)

print("Evaluating model ...")
t1 = time.time()
token_f1_score, tag_f1_score, report, acc = nlu.evaluate(test_dataset)
t2 = time.time()

print('Slot Classification Report:', report)
print('Slot token f1_score = %f' % token_f1_score)
print('Slot tag f1_score = %f' % tag_f1_score)
print('Intent accuracy = %f' % acc)
print('Evaluation time = %f sec' % (t2 - t1))
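Note that quantized=True assumes the model directory contains a TFLite export. A sketch of producing one with nlu.save; Example #4 below uses conversion_mode="normal", and the quantizing mode string here is an assumption to verify against the dialog-nlu documentation:

# "hybrid_quantization" is an assumed mode name; check dialog-nlu for the exact value.
nlu.save(model_path, save_tflite=True, conversion_mode="hybrid_quantization")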
Example #3
import argparse

import tensorflow as tf

from dialognlu import TransformerNLU
from dialognlu.readers.goo_format_reader import Reader

# This example is a fragment of a command-line training script. `args` is an
# argparse namespace; the mapping of --model and --trans to the two variables
# below is inferred from the error message further down (a parser sketch
# follows this example).
start_model_folder_path = args.model        # --model: saved model to resume from
pretrained_model_name_or_path = args.trans  # --trans: pretrained transformer name
from_pt = args.from_pt
cache_dir = args.cache_dir
# train_data_folder_path, val_data_folder_path, epochs, batch_size and
# save_folder_path used below are likewise read from args in the full script.
if start_model_folder_path is None and pretrained_model_name_or_path is None:
    raise argparse.ArgumentTypeError(
        "Either --model OR --trans should be provided")

print('Reading data ...')
train_dataset = Reader.read(train_data_folder_path)
val_dataset = Reader.read(val_data_folder_path)

if start_model_folder_path is None:
    config = {
        "cache_dir": cache_dir,
        "pretrained_model_name_or_path": pretrained_model_name_or_path,
        "from_pt": from_pt,
        "num_bert_fine_tune_layers": 10,
        "intent_loss_weight": 1.0,
        "slots_loss_weight": 3.0,
    }
    nlu = TransformerNLU.from_config(config)
else:
    nlu = TransformerNLU.load(start_model_folder_path)

print("Training ...")
nlu.train(train_dataset, val_dataset, epochs, batch_size)

print("Saving ...")
nlu.save(save_folder_path)
print("Done")

tf.compat.v1.reset_default_graph()
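For completeness, a minimal argparse setup that the fragment above appears to assume. Only --model and --trans are named in the original error message; the remaining flag names are hypothetical:

import argparse

parser = argparse.ArgumentParser(description="Train a joint transformer NLU model.")
parser.add_argument("--model", help="path to a saved model to resume training from")
parser.add_argument("--trans", help="pretrained transformer name, e.g. bert-base-uncased")
parser.add_argument("--from_pt", action="store_true",
                    help="load transformer weights from a PyTorch checkpoint")
parser.add_argument("--cache_dir", default=None, help="transformers cache directory")
# Hypothetical flags for the remaining values the fragment reads:
parser.add_argument("--train", dest="train_data_folder_path")
parser.add_argument("--val", dest="val_data_folder_path")
parser.add_argument("--save", dest="save_folder_path")
parser.add_argument("--epochs", type=int, default=5)
parser.add_argument("--batch", dest="batch_size", type=int, default=32)
args = parser.parse_args()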
Example #4
from dialognlu import TransformerNLU
from dialognlu.readers.goo_format_reader import Reader

# Assumed dataset paths; the lines reading the training and validation sets
# were cut off from this fragment.
train_dataset = Reader.read("../data/snips/train")
val_dataset = Reader.read("../data/snips/valid")

# pretrained_model_name_or_path = "roberta-base"
# save_path = "../saved_models/joint_trans_roberta_model"

# pretrained_model_name_or_path = "albert-base-v1"
# save_path = "../saved_models/joint_trans_albert_model"

# pretrained_model_name_or_path = "bert-base-uncased"
# save_path = "../saved_models/joint_trans_bert_model"

# pretrained_model_name_or_path = "distilbert-base-uncased"
# save_path = "../saved_models/joint_distilbert_model"

epochs = 5
batch_size = 32  # alternative: 64

config = {
    "cache_dir": "/media/mwahdan/Data/transformers",
    "pretrained_model_name_or_path": pretrained_model_name_or_path,
    "from_pt": False,
    "num_bert_fine_tune_layers": 10,
    "intent_loss_weight": 1.0,  #0.2,
    "slots_loss_weight": 3.0,  #2.0,
    "max_length":
    64,  # You can set max_length (recommended) or leave it and it will be computed automatically based on longest training example
}

nlu = TransformerNLU.from_config(config)
nlu.train(train_dataset, val_dataset, epochs, batch_size)

print("Saving ...")
nlu.save(save_path, save_tflite=True, conversion_mode="normal")
print("Done")