def main(generator):
    trn = generator.dataset
    tst = YelpDataset(args.test_data, vocab)
    trainer = BaseSeqTrainer(generator, trn, generator.device, args)
    if args.save_ans:
        if args.load:
            print(trainer.evaluate(tst, args.load))
        else:
            logger.error("No model parameter configured!")
            exit(0)
    else:
        trainer.train(tst, args.load)

def main(generator):
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[
            logging.FileHandler("{0}/{1}.log".format("log", args.logger)),
            logging.StreamHandler()
        ])
    logger.info('logger start')
    trn = generator.dataset
    tst = YelpDataset(args.test_data, vocab)
    trainer = Seq2SeqTrainer(generator, trn, generator.device, args)
    if args.save_ans:
        if args.load:
            print(trainer.evaluate(tst, args.load))
        else:
            logger.error("No model parameter configured!")
            exit(0)
    else:
        trainer.train(tst, args.load)

def main(generator):
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[
            logging.FileHandler("{0}/{1}.log".format("log", args.logger)),
            logging.StreamHandler()
        ])
    logger = logging.getLogger(__name__)
    logger.info('logger start')
    g = generator
    dataset = g.data_set
    test_dataset = YelpDataset(args.test_data, dataset.vocab, args.test_tree)
    logger.info("start network")
    trainer = GeneratorTrainer(g, dataset, g.device, args)
    if args.save_ans:
        if args.load:
            trainer.evaluate(test_dataset, args.load)
        else:
            logger.error("No model parameter configured!")
            exit(0)
    else:
        trainer.train(test_dataset, args.load)

model = model.cuda()
print(args)

# Batch of identity matrices, one per example, sized by the number of attention hops.
I = torch.zeros(args.batch_size, args.attention_hops, args.attention_hops)
for i in range(args.batch_size):
    for j in range(args.attention_hops):
        I.data[i][j][j] = 1
if args.cuda:
    I = I.cuda()
criterion = nn.CrossEntropyLoss()

print('Begin to load data.')
import joblib

# Reload trained model parameters when a checkpoint path is given.
if args.load:
    model.load_state_dict(torch.load(args.load))
    if args.cuda:
        model = model.cuda()
print('-' * 89)

# Select which attack to run based on args.model.
if args.model == 'tree':
    data_val = YelpDataset(args.test_data, vocab, args.test_tree)
    cw_tree_attack(data_val)
elif args.model == 'random_attack':
    data_val = YelpDataset(args.test_data, vocab)
    print("random attack!")
    cw_rand_words_attack(data_val)
else:
    data_val = YelpDataset(args.test_data, vocab)
    cw_word_attack(data_val)

if __name__ == '__main__':
    args = get_args()
    PAD_WORD = '<pad>'
    UNK_WORD = '<unk>'
    EOS_WORD = '<eos>'
    SOS_WORD = '<sos>'
    vocab = Vocab(filename=args.dictionary,
                  data=[PAD_WORD, UNK_WORD, EOS_WORD, SOS_WORD])
    PAD = vocab.getIndex(PAD_WORD)
    device = torch.device("cuda:0" if args.cuda else "cpu")
    print('Loading word vectors from', args.word_vector)
    embed = torch.load(args.word_vector)
    trn = YelpDataset(args.train_data, vocab)
    encoder = EncoderRNN(vocab, embed.size(1), args.nhid, device)
    decoder = Decoder(embed.size(1), args.nhid, vocab.size())
    generator = Seq2SeqGenerator(encoder, decoder, embed=embed, dataset=trn).to(device)
    main(generator)

logger.info("start network") trainer = GeneratorTrainer(g, dataset, g.device, args) if args.save_ans: if args.load: trainer.evaluate(test_dataset, args.load) else: logger.error("No model parameter configured!") exit(0) else: trainer.train(test_dataset, args.load) if __name__ == '__main__': args = get_args() PAD_WORD = '<pad>' UNK_WORD = '<unk>' EOS_WORD = '<eos>' SOS_WORD = '<sos>' vocab = Vocab(filename=args.dictionary, data=[PAD_WORD, UNK_WORD, EOS_WORD, SOS_WORD]) PAD = vocab.getIndex(PAD_WORD) device = torch.device("cuda:0" if args.cuda else "cpu") print('Loading word vectors from', args.word_vector) embed = torch.load(args.word_vector) trn = YelpDataset(args.train_data, vocab, args.train_tree) generator = Generator(args.train_data, vocab, embed, trn) main(generator)