def train(opt):
    # basics definition
    opt.experiment = os.path.join(root_dir, opt.experiment)
    if not os.path.exists(opt.experiment):
        os.makedirs(opt.experiment)
    opt.save_model = os.path.join(opt.experiment, opt.save_model)
    opt.log_path = os.path.join(opt.experiment, 'log.train')
    opt.logger = make_logger(opt.log_path)

    # dataIter definition
    train_iter = OneBestIter4STC(opt.data_root + 'train', opt.word2idx,
                                 opt.class2idx, opt.batch_size, opt.cuda, True)
    valid_iter = OneBestIter4STC(opt.data_root + 'valid', opt.word2idx,
                                 opt.class2idx, opt.batch_size, opt.cuda, False)

    # model definition
    model = make_model(opt)
    if opt.load_emb:
        emb = read_emb(opt.word2idx)
        model.emb.init_weight_from_pre_emb(emb, opt.fix_emb)
    print(model)

    # criterion definition
    criterion = nn.BCELoss(reduction='sum')
    if opt.cuda:
        criterion = criterion.cuda()

    # optimizer definition
    optimizer = Optim(opt.optim, opt.lr, max_grad_norm=opt.max_norm)
    optimizer.set_parameters(model.named_parameters())
    print('Trainable parameter number: {}'.format(len(optimizer.params)))

    # training procedure
    trainer = OneBestTrainer4STC(model, criterion, optimizer, opt.logger)
    trainer.train(opt.epochs, train_iter, valid_iter, opt.save_model)
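# `init_weight_from_pre_emb` is a project-local helper; the sketch below is an
# assumption about what such a loader typically does (copy a pretrained matrix
# into an nn.Embedding, optionally freezing it), not this repo's actual code.
import torch
import torch.nn as nn

def init_from_pretrained(embedding: nn.Embedding, pre_emb, fix=False):
    # pre_emb: (vocab_size, emb_dim) array aligned with the word2idx table
    with torch.no_grad():
        embedding.weight.copy_(torch.as_tensor(pre_emb, dtype=embedding.weight.dtype))
    # freezing keeps the pretrained vectors fixed during training
    embedding.weight.requires_grad = not fix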
def train(opt):
    # basics definition
    opt.experiment = os.path.join(root_dir, opt.experiment)
    if not os.path.exists(opt.experiment):
        os.makedirs(opt.experiment)
    opt.save_model = os.path.join(opt.experiment, opt.save_model)
    opt.log_path = os.path.join(opt.experiment, 'log.train')
    opt.logger = make_logger(opt.log_path)

    # memory info
    print("encoder word2idx number: {}".format(opt.enc_word_vocab_size))
    print("decoder word2idx number: {}".format(opt.dec_word_vocab_size))
    print("act2idx number: {}".format(opt.act_vocab_size))
    print("slot2idx number: {}".format(opt.slot_vocab_size))

    # Model definition
    model = make_model(opt)
    if opt.load_word_emb:
        emb = read_emb(opt.memory['enc2idx'])
        model.enc_word_emb.init_weight_from_pre_emb(emb, opt.fix_word_emb)
    if opt.load_class_emb:
        emb = opt.memory['act_emb']
        model.act_emb.init_weight_from_pre_emb(emb, opt.fix_class_emb)
        emb = opt.memory['slot_emb']
        model.slot_emb.init_weight_from_pre_emb(emb, opt.fix_class_emb)
    if opt.share_param:
        # tie the act/slot classifier weights to the corresponding
        # class-embedding tables (weight sharing)
        #model.value_decoder.outlin.weight.data = model.word_emb.embedding.weight.data
        #model.value_decoder.outlin.weight.requires_grad = model.word_emb.embedding.weight.requires_grad
        model.act_stc.lin.weight.data = model.act_emb.embedding.weight.data
        model.act_stc.lin.weight.requires_grad = model.act_emb.embedding.weight.requires_grad
        model.slot_stc.lin.weight.data = model.slot_emb.embedding.weight.data
        model.slot_stc.lin.weight.requires_grad = model.slot_emb.embedding.weight.requires_grad
    if opt.cuda:
        model = model.cuda()
    print(model)

    # optimizer details
    optimizer = Optim(opt.optim, opt.lr, max_grad_norm=opt.max_norm)
    optimizer.set_parameters(model.named_parameters())
    print("training parameters number: {}".format(len(optimizer.params)))

    # loss definition
    #stc_criterion = MaskedBCELoss(opt.cuda)
    stc_criterion = nn.BCELoss(reduction='sum')
    nll_criterion = nn.NLLLoss(reduction='sum')
    if opt.cuda:
        stc_criterion = stc_criterion.cuda()
        nll_criterion = nll_criterion.cuda()

    # training procedure
    if opt.task == 'slu':
        data_iter = SLUDataset(opt.data_root + 'train', opt.memory, opt.cuda, True)
        trainer = SLUTrainer(model, (stc_criterion, nll_criterion), optimizer,
                             opt.logger, cuda=opt.cuda)
    elif opt.task == 'act':
        data_iter = ActDataset(opt.data_root + 'train', opt.memory, opt.cuda, True)
        trainer = ActTrainer(model, stc_criterion, optimizer, opt.logger, cuda=opt.cuda)
    elif opt.task == 'slot':
        data_iter = SlotDataset(opt.data_root + 'train', opt.memory, opt.cuda, True)
        trainer = SlotTrainer(model, stc_criterion, optimizer, opt.logger, cuda=opt.cuda)
    elif opt.task == 'value':
        data_iter = ValueDataset(opt.data_root + 'train', opt.memory, opt.cuda, True)
        trainer = ValueTrainer(model, nll_criterion, optimizer, opt.logger, cuda=opt.cuda)
    else:
        # without this guard, an unrecognized task left `trainer` undefined
        raise ValueError('unknown task: {}'.format(opt.task))
    trainer.train(opt.epochs, opt.batch_size, opt.memory, data_iter,
                  opt.data_root + 'valid', opt.save_model)
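# Hedged usage sketch: construct the `opt` namespace that train() reads and
# launch one sub-task. Every value below is a hypothetical placeholder, not a
# default from this repo; the `memory` dict (word2idx tables, pretrained
# act/slot embeddings) would be built by project code not shown here.
import torch
from argparse import Namespace

if __name__ == '__main__':
    opt = Namespace(
        experiment='exp/slu', save_model='model.pt',
        task='slu', data_root='data/', batch_size=32,
        optim='adam', lr=1e-3, max_norm=5.0, epochs=20,
        cuda=torch.cuda.is_available(),
        load_word_emb=False, fix_word_emb=False,
        load_class_emb=False, fix_class_emb=False,
        share_param=False,
        memory={},  # placeholder: needs 'enc2idx', 'act_emb', 'slot_emb', ...
        enc_word_vocab_size=0, dec_word_vocab_size=0,
        act_vocab_size=0, slot_vocab_size=0,
    )
    train(opt)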