torch.set_default_dtype(argPrec)
cvt = lambda x: x.type(argPrec).to(device, non_blocking=True)

# load model
logger.info(' ')
logger.info("loading model: {:}".format(args.resume))
logger.info(' ')

checkpt = torch.load(args.resume, map_location=lambda storage, loc: storage)
m    = checkpt['args'].m
alph = checkpt['args'].alph
nTh  = checkpt['args'].nTh
data = checkpt['args'].data

prob, x0, _, xInit = initProb(data, 10, 11, var0=0.5, alph=alph, cvt=cvt)
d = x0.size(1)

net = Phi(nTh=nTh, m=m, d=d, alph=alph)  # the phi aka the value function
net.load_state_dict(checkpt["state_dict"])
net = net.to(argPrec).to(device)

nt = args.nt
xtarget = prob.xtarget
strTitle = 'eval_' + os.path.basename(args.resume)[:-12]  # drop the last 12 chars (checkpoint-file suffix)

with torch.no_grad():
    net.eval()
    sPath = args.save + '/figs/' + strTitle + '.png'
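# Example invocation (script and checkpoint names are hypothetical; --resume,
# --nt, and --save are the flags this block actually reads from args):
#   python evaluateOC.py --resume experiments/oc/model_checkpt.pth \
#       --nt 50 --save experiments/oc/eval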
        print(i, err.item())

        if (i + 1) % max(nIters // 4, 1) == 0:  # lower lr every quarter of the run (assumed schedule)
            lr = lr * 0.1
            print('lr: ', lr)

    return ubest


if __name__ == '__main__':

    alphG = args.alph[0]
    prob, _, _, xInit = initProb(
        args.data, 10, 10, var0=1.0, cvt=cvt,
        alph=[alphG, args.alph[1], args.alph[2], 0.0, 0.0, 0.0])
    prob.train()
    d = xInit.numel()

    strTitle = 'baseline_' + args.data + '_{:}_{:}_{:}'.format(
        int(alphG), int(prob.alph_Q), int(prob.alph_W))

    x0 = xInit  # x0 can be more than one point
    traj = cvt(torch.zeros(x0.size(0), d, nt + 1))  # state of each start point at the nt+1 time nodes
    h = 1. / nt  # time step on [0, 1]

    for i in range(x0.size(0)):
        z0 = x0[i, :]

        if args.resume is not None:  # load a previous model
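        # Hedged sketch (not necessarily this repo's integrator): one way the
        # loop could fill traj is a forward-Euler rollout, where f and u are
        # placeholder dynamics and control functions:
        #   z = z0.clone()
        #   traj[i, :, 0] = z
        #   for k in range(nt):
        #       z = z + h * f(z, u(z, k * h))
        #       traj[i, :, k + 1] = z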
device = torch.device('cuda:' + str(args.gpu) if torch.cuda.is_available() else 'cpu')
print('device: ', device)

if __name__ == '__main__':

    torch.set_default_dtype(argPrec)
    cvt = lambda x: x.type(argPrec).to(device, non_blocking=True)

    n_train = args.n_train
    nVal = n_train
    alph = args.alph

    # set-up problem
    prob, x0, x0v, xInit = initProb(args.data, n_train, nVal, var0=args.var0,
                                    alph=alph, cvt=cvt)

    # set-up model
    d = x0.size(1)  # dimension of the problem
    m = args.m
    nt = args.nt
    nt_val = args.nt_val
    nTh = args.nTh
    tspan = [0.0, 1.0]  # throughout we solve on [0, T=1]

    net = Phi(nTh=nTh, d=d, m=m, alph=alph)
    net = net.to(argPrec).to(device)

    # resume training on a model that's already had some training
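    # Hedged sketch of the resume branch (assumes the checkpoint layout used
    # in the evaluation block above, i.e. a dict with 'state_dict' and 'args'):
    #   if args.resume is not None:
    #       checkpt = torch.load(args.resume, map_location=lambda storage, loc: storage)
    #       net.load_state_dict(checkpt['state_dict'])
    #       net = net.to(argPrec).to(device)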