# Reproducibility: make cuDNN deterministic and seed every RNG in use
torch.backends.cudnn.deterministic = True

if not args.cuda:
    args.gpu = -1
if torch.cuda.is_available() and args.cuda:
    print('Note: You are using GPU for training')
    torch.cuda.set_device(args.gpu)
    torch.cuda.manual_seed(args.seed)
if torch.cuda.is_available() and not args.cuda:
    print('Warning: CUDA is available but not in use. You are training on the CPU.')

np.random.seed(args.seed)
random.seed(args.seed)

logger = get_logger()

# Set up the data for training SST-1
if args.dataset == 'SST-1':
    train_iter, dev_iter, test_iter = SST1.iters(args.data_dir, args.word_vectors_file, args.word_vectors_dir,
                                                 batch_size=args.batch_size, device=args.gpu,
                                                 unk_init=UnknownWordVecCache.unk)
# Set up the data for training SST-2
elif args.dataset == 'SST-2':
    train_iter, dev_iter, test_iter = SST2.iters(args.data_dir, args.word_vectors_file, args.word_vectors_dir,
                                                 batch_size=args.batch_size, device=args.gpu,
                                                 unk_init=UnknownWordVecCache.unk)
# Set up the data for training Reuters
elif args.dataset == 'Reuters':
    train_iter, dev_iter, test_iter = Reuters.iters(args.data_dir, args.word_vectors_file, args.word_vectors_dir,
                                                    batch_size=args.batch_size, device=args.gpu,
                                                    unk_init=UnknownWordVecCache.unk)
else:
    raise ValueError('Unrecognized dataset')

# Build the model configuration from the command-line arguments plus dataset-derived fields
config = deepcopy(args)
config.dataset = train_iter.dataset
config.target_class = train_iter.dataset.NUM_CLASSES
config.words_num = len(train_iter.dataset.TEXT_FIELD.vocab)

print('Dataset: {}  Mode: {}'.format(args.dataset, args.mode))
print('Vocabulary size: {}'.format(len(train_iter.dataset.TEXT_FIELD.vocab)))
print("Note: You are using GPU for training") torch.cuda.set_device(args.gpu) torch.cuda.manual_seed(args.seed) if torch.cuda.is_available() and not args.cuda: print( "Warning: You have Cuda but not use it. You are using CPU for training." ) np.random.seed(args.seed) random.seed(args.seed) logger = get_logger() # Set up the data for training SST-1 if args.dataset == 'SST-1': train_iter, dev_iter, test_iter = SST1.iters( args.data_dir, args.word_vectors_file, args.word_vectors_dir, batch_size=args.batch_size, device=args.gpu) # Set up the data for training SST-2 elif args.dataset == 'SST-2': train_iter, dev_iter, test_iter = SST2.iters( args.data_dir, args.word_vectors_file, args.word_vectors_dir, batch_size=args.batch_size, device=args.gpu) else: raise ValueError('Unrecognized dataset') config = deepcopy(args) config.dataset = train_iter.dataset