# -*- coding: utf-8 -*-
"""
@author: mwahdan
"""

from dialognlu import TransformerNLU
from dialognlu.readers.goo_format_reader import Reader

# Saved joint-NLU model to evaluate; switch the path to try another
# saved model (e.g. the distilbert variant kept below for reference).
# model_path = "../saved_models/joint_distilbert_model"
model_path = "../saved_models/joint_trans_xlnet_model"
test_path = "../data/snips/test"

print("Loading model ...")
nlu = TransformerNLU.load(model_path)

print("Loading dataset ...")
test_dataset = Reader.read(test_path)

# Run joint evaluation: slot-filling F1 scores plus intent accuracy.
print("Evaluating model ...")
token_f1_score, tag_f1_score, report, acc = nlu.evaluate(test_dataset)

print('Slot Classification Report:', report)
print('Slot token f1_score = %f' % token_f1_score)
print('Slot tag f1_score = %f' % tag_f1_score)
print('Intent accuracy = %f' % acc)
# -*- coding: utf-8 -*-
"""
@author: mwahdan
"""

from dialognlu import BertNLU
from dialognlu.readers.goo_format_reader import Reader

# Dataset locations (Goo et al. format) and where to save the trained model.
train_path = "../data/snips/train"
val_path = "../data/snips/valid"
save_path = "../saved_models/joint_bert_model"

# Training hyper-parameters.
epochs = 1  # 3
batch_size = 64

train_dataset = Reader.read(train_path)
val_dataset = Reader.read(val_path)

# Build a joint intent/slot model from a minimal config, then train it.
nlu = BertNLU.from_config({"model_type": "bert"})
nlu.train(train_dataset, val_dataset, epochs, batch_size)

print("Saving ...")
nlu.save(save_path)
print("Done")
args = parser.parse_args()

train_data_folder_path = args.train
val_data_folder_path = args.val
save_folder_path = args.save
epochs = args.epochs
batch_size = args.batch
start_model_folder_path = args.model
pretrained_model_name_or_path = args.trans
from_pt = args.from_pt
cache_dir = args.cache_dir

# A starting point is required: either resume from a saved model
# directory (--model) or start from a pretrained transformer
# checkpoint (--trans).  `parser.error` prints the usage message and
# exits with status 2 — the idiomatic argparse failure path.
# (The previous `raise argparse.ArgumentTypeError(...)` was a misuse:
# that exception is reserved for `type=` converter callables, and
# raised directly it surfaces as a raw traceback.)
if start_model_folder_path is None and pretrained_model_name_or_path is None:
    parser.error("Either --model OR --trans should be provided")

print('Reading data ...')
train_dataset = Reader.read(train_data_folder_path)
val_dataset = Reader.read(val_data_folder_path)

if start_model_folder_path is None:
    # Fresh model built from a pretrained transformer checkpoint.
    config = {
        "cache_dir": cache_dir,
        "pretrained_model_name_or_path": pretrained_model_name_or_path,
        "from_pt": from_pt,
        "num_bert_fine_tune_layers": 10,
        # Slot-filling loss is weighted higher than intent loss.
        "intent_loss_weight": 1.0,
        "slots_loss_weight": 3.0,
    }
    nlu = TransformerNLU.from_config(config)
else:
    # Resume from a previously saved model directory.
    nlu = TransformerNLU.load(start_model_folder_path)
required=True)
# NOTE: the `required=True)` above closes a parser.add_argument call
# that begins earlier in the file, outside this fragment.
parser.add_argument('--data', '-d',
                    help='Path to data in Goo et al format',
                    type=str, required=True)
parser.add_argument('--batch', '-bs',
                    help='Batch size',
                    type=int, default=128, required=False)

args = parser.parse_args()

# Command-line arguments.
model_path = args.model
data_folder_path = args.data
# NOTE(review): batch_size is parsed but never used below — confirm
# whether nlu.evaluate should receive it.
batch_size = args.batch

print("Loading model ...")
nlu = AutoNLU.load(model_path)

print("Loading dataset ...")
test_dataset = Reader.read(data_folder_path)

# Joint evaluation: slot-filling F1 scores plus intent accuracy.
print("Evaluating model ...")
token_f1_score, tag_f1_score, report, acc = nlu.evaluate(test_dataset)

print('Slot Classification Report:', report)
print('Slot token f1_score = %f' % token_f1_score)
print('Slot tag f1_score = %f' % tag_f1_score)
print('Intent accuracy = %f' % acc)