# Report dataset sizes before building the model.
print('train: ', len(train_data.dataset))
print('test: ', len(test_data.dataset))


def cuda_tensors(obj):
    """Move every torch.Tensor attribute of *obj* onto the GPU, in place.

    Walks all attribute names visible via dir() and replaces each tensor
    attribute with its .cuda() copy; non-tensor attributes are untouched.
    """
    for name in dir(obj):
        value = getattr(obj, name)
        if torch.is_tensor(value):
            setattr(obj, name, value.cuda())


# Paired encoder/decoder for each modality; both share the same latent size.
encA = EncoderA(args.wseed, zShared_dim=args.n_shared)
decA = DecoderA(args.wseed, zShared_dim=args.n_shared)
encB = EncoderB(args.wseed, zShared_dim=args.n_shared)
decB = DecoderB(args.wseed, zShared_dim=args.n_shared)

if CUDA:
    # Move module parameters first, then any loose tensor attributes
    # (same side-effect order as running all .cuda() calls before all
    # cuda_tensors() calls).
    for net in (encA, decA, encB, decB):
        net.cuda()
    for net in (encA, decA, encB, decB):
        cuda_tensors(net)

# One optimizer over all four networks' parameters.
optimizer = torch.optim.Adam(
    list(encB.parameters()) + list(decB.parameters())
    + list(encA.parameters()) + list(decA.parameters()),
    lr=args.lr)
encB = EncoderAttr(args.wseed, zShared_dim=args.n_shared, num_hidden=args.num_hidden) decB = DecoderAttr(args.wseed, zShared_dim=args.n_shared, num_hidden=args.num_hidden) ae_encA = EncoderA(0) ae_decA = DecoderA2(0) if CUDA: encA.cuda() decA.cuda() encB.cuda() decB.cuda() ae_encA.cuda() ae_decA.cuda() cuda_tensors(encA) cuda_tensors(decA) cuda_tensors(encB) cuda_tensors(decB) cuda_tensors(ae_encA) cuda_tensors(ae_decA) if args.ckpt_epochs > 0: if CUDA: encA = torch.load('%s/%s-encA_epoch%s.rar' % (args.ckpt_path, MODEL_NAME, args.ckpt_epochs)) encB = torch.load('%s/%s-encB_epoch%s.rar' % (args.ckpt_path, MODEL_NAME, args.ckpt_epochs)) decA = torch.load('%s/%s-decA_epoch%s.rar' %