domain=hess_all_reg_domain, # Box-constraints of the problem initial_design_numdata=0, # Number data initial design initial_design_type="random", acquisition_optimizer_type='lbfgs', acquisition_type='EI', # Expected Improvement ‘EI’ exact_feval=False, # True evaluations, no sample noise maximize=False) #%% myBopt.X = np.array([[-1.3, -0.5, -2.34, -4, -3, 0.7], [-1.3, -0.5, -2.34, -4, -3, 1.0], [-1.3, -0.5, -2.34, -4, -3, 1.3], [-1, -0.5, -2.34, -4, -3, 0.7], [-1.3, -1, -2.34, -4, -3, 0.7], [-1.3, -0.5, -2.34, -3, -2, 0.7], [-1.8, -0.5, -2.34, -4, -3, 0.7]]) myBopt.Y = np.array([[dsim], [dsim2], [dsim3], [dsim4], [dsim5], [dsim6], [dsim7]]) #%% 40 mins for 48 iterations max_iter = 1000 ## maximum number of iterations max_time = 36000 ## maximum allowed time eps = 1e-4 ## tolerance, max distance between consicutive evaluations. myBopt.run_optimization(max_iter=max_iter, max_time=max_time, eps=eps, verbosity=True) np.savez(join(savedir, "BigGAN_Hess_Adam_optim_BO_tune600.npz"), X=myBopt.X, Y=myBopt.Y, Y_best=myBopt.Y_best, domain=hess_all_reg_domain) scores_short_tab = pd.DataFrame(
# NOTE(review): this chunk starts mid-list — these are the final entries of
# the `mixed_domain` search-space specification (opening `mixed_domain = [`
# lies before the visible region). Newlines below are reconstructed; every
# code token is unchanged.
{'name': 'momentum1', 'type': 'continuous', 'domain': (0, 0.9),'dimensionality': 1},
{'name': 'momentum2', 'type': 'continuous', 'domain': (0, 0.9),'dimensionality': 1},]
# Output directory for BO traces (Windows path; raw string keeps backslashes literal).
savedir = r"E:\OneDrive - Washington University in St. Louis\BigGAN_invert"
#%%
%%time
# NOTE(review): `%%time` is an IPython cell magic, not plain Python — this
# file only runs as notebook cells, not as a module.
myBopt = BayesianOptimization(f=optim_BigGAN, # Objective function
domain=mixed_domain, # Box-constraints of the problem
initial_design_numdata = 0, # Number data initial design (0: we seed X/Y manually below)
initial_design_type="random",
acquisition_optimizer_type='lbfgs',
acquisition_type='LCB', # Lower Confidence Bound acquisition (comment above said 'EI'; code uses 'LCB')
exact_feval = False, # True evaluations, no sample noise
maximize=False)
#%%
# Warm-start with two hand-picked 6-D points and their pre-computed scores.
myBopt.X = np.array([[-1.5, -2.5, -3, -3, 0.8, 0.5],
                     [-1.5, -2.5, -3.5, -3.5, 0.8, 0.5]])
myBopt.Y = np.array([[1.666],[1.790]])
#%% 40 mins for 48 iterations
max_iter = 900 ## maximum number of iterations
max_time = 30000 ## maximum allowed time (seconds, per GPyOpt convention)
eps = 1e-4 ## tolerance, max distance between consecutive evaluations.
myBopt.run_optimization(max_iter=max_iter, max_time=max_time, eps=eps, verbosity=True)
# Persist the full BO trace (points, scores, running best, search domain).
np.savez(join(savedir, "BigGAN_SGD_optim_BO_tune300.npz"),
         X=myBopt.X, Y=myBopt.Y, Y_best=myBopt.Y_best, domain=mixed_domain)
# Tabulate (X | Y) side by side — columns match the 6 search dimensions plus
# the objective score — and export to CSV for inspection.
scores_short_tab = pd.DataFrame(np.append(myBopt.X, myBopt.Y, 1),
                                columns=["lr1","lr2","wd1","wd2","mom1","mom2","scores"])
scores_short_tab.to_csv(join(savedir, "BigGAN_SGD_optim_BO_tune300.csv"))
# %%
#%% Using Hessian to Pre-condition the latent space
#%%
# NOTE(review): start of the next (unseen) section — samples a truncated
# BigGAN latent noise vector on the GPU.
noise_init = torch.from_numpy(truncated_noise_sample(1, 128)).cuda()