# ------------------------------------------------------------------
# Initialize Network: build each module, freeze its parameters, and
# restore the weights saved at the end of training (epoch nepoch - 1).
# Checkpoints written on cuda:0 are remapped onto opt.gpuId.
# ------------------------------------------------------------------
_ckptMap = {'cuda:0': 'cuda:{0}'.format(opt.gpuId)}


def _restoreFrozen(net, ckptPath):
    # Freeze every parameter of net, then load the checkpoint in place.
    for p in net.parameters():
        p.requires_grad = False
    net.load_state_dict(torch.load(ckptPath, map_location=_ckptMap))
    return net


encoder = _restoreFrozen(
    models.encoder(isAddCostVolume=opt.isAddCostVolume),
    '{0}/encoder_{1}.pth'.format(opt.experiment, opt.nepoch - 1))
decoder = _restoreFrozen(
    models.decoder(isAddVisualHull=opt.isAddVisualHull),
    '{0}/decoder_{1}.pth'.format(opt.experiment, opt.nepoch - 1))
normalFeature = _restoreFrozen(
    models.normalFeature(),
    '{0}/normalFeature_{1}.pth'.format(opt.experiment, opt.nepoch - 1))

# Pooling weights over the angleNum x angleNum angular grid; fixed
# (not optimized) at evaluation time.
normalPool = Variable(
    torch.ones([1, angleNum * angleNum, 1, 1, 1], dtype=torch.float32))
normalPool.requires_grad = False
if opt.isAddCostVolume and opt.poolingMode == 2:
    # Learned pooling weights were saved during training -- restore them.
    normalPool.data.copy_(
        torch.load('{0}/normalPool_{1}.pth'.format(opt.experiment,
                                                   opt.nepoch - 1),
                   map_location=_ckptMap))
opt.seed = 0 print("Random Seed: ", opt.seed) random.seed(opt.seed) torch.manual_seed(opt.seed) if torch.cuda.is_available() and not opt.cuda: print( "WARNING: You have a CUDA device, so you should probably run with --cuda" ) #################################### # Initialize Network encoder = nn.DataParallel(models.encoder(isAddCostVolume=opt.isAddCostVolume), device_ids=opt.deviceIds) decoder = nn.DataParallel(models.decoder(), device_ids=opt.deviceIds) normalFeature = nn.DataParallel(models.normalFeature(), device_ids=opt.deviceIds) normalPool = Variable( torch.ones([1, angleNum * angleNum, 1, 1, 1], dtype=torch.float32)) ############## ###################### # Send things into GPU if opt.cuda: encoder = encoder.cuda() decoder = decoder.cuda() normalFeature = normalFeature.cuda() normalPool = normalPool.cuda() #################################### # Other modules renderer = models.renderer(eta1=opt.eta1,