Example #1
 def sample_pareto_fronts(self, num_of_design_samples=5,
                          num_of_gp=5,
                          num_of_design_points=1000, verbose=False):
     """
     Samples a plausible Pareto front.
     NOTE: Only works if design is the unit hyper-cube.
     """
     import design
     Y_p = []
     for _ in xrange(num_of_design_samples):
         X_design = design.latin_center(num_of_design_points, self.X.shape[1])
         Y = []
         for m in self.surrogates:
             _m = copy.copy(m)
             _m.Gaussian_noise.variance.unconstrain()
             _m.Gaussian_noise.variance.fix(1e-8)
             y = _m.posterior_samples(X_design, size=num_of_gp, full_cov=True)
             Y.append(y)
         Y = np.array(Y)
         for i in xrange(Y.shape[2]):
             if verbose:
                 print 'sampling pareto', _, i
             idx = get_idx_of_observed_pareto_front(Y[:, :, i].T)
             y_p = Y[:, idx, i].T
             Y_p.append(y_p)
     return Y_p
Example #2
 def sample_pareto_fronts(self,
                          num_of_design_samples=5,
                          num_of_gp=5,
                          num_of_design_points=1000,
                          verbose=False):
     """
     Samples a plausible Pareto front.
     NOTE: Only works if design is the unit hyper-cube.
     """
     import design
     Y_p = []
     for _ in xrange(num_of_design_samples):
         X_design = design.latin_center(num_of_design_points,
                                        self.X.shape[1])
         Y = []
         for m in self.surrogates:
             _m = copy.copy(m)
             _m.Gaussian_noise.variance.unconstrain()
             _m.Gaussian_noise.variance.fix(1e-8)
             y = _m.posterior_samples(X_design,
                                      size=num_of_gp,
                                      full_cov=True)
             Y.append(y)
         Y = np.array(Y)
         for i in xrange(Y.shape[2]):
             if verbose:
                 print 'sampling pareto', _, i
             idx = get_idx_of_observed_pareto_front(Y[:, :, i].T)
             y_p = Y[:, idx, i].T
             Y_p.append(y_p)
     return Y_p
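The helper `get_idx_of_observed_pareto_front` used in Examples #1 and #2 is not shown on this page. Purely as an illustration, a minimal non-dominated filter for a minimization problem (rows of `Y` are objective vectors) might look like the sketch below; the actual helper may differ.

import numpy as np

def pareto_front_indices(Y):
    """Return the indices of the non-dominated rows of Y (minimization).

    A row dominates another if it is no worse in every objective and
    strictly better in at least one.
    """
    keep = np.ones(Y.shape[0], dtype=bool)
    for i in range(Y.shape[0]):
        if not keep[i]:
            continue
        # Drop every point that row i dominates.
        dominated = np.all(Y >= Y[i], axis=1) & np.any(Y > Y[i], axis=1)
        keep[dominated] = False
    return np.where(keep)[0]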
Example #3
def new_optimization():
	global inputs, outputs, l_bounds, u_bounds, max_it, x_datalist, y_datalist, csv_valid, pareto_data, designs, bounds, initial_x_designs, mk_plots
	response = None
	my_log.write('New optimization...\n')
	if check_observations():
		Rappture.Utils.progress(10, "New model being created...")
		out_dir = 'surf_test_results_noisy_moo'
		if os.path.isdir(out_dir):
			shutil.rmtree(out_dir)
		os.makedirs(out_dir)

		X_init = np.array(x_datalist)
		Y_init = np.array(y_datalist)

		if bounds:	
			my_log.write('Bounds given...\n')
			a = literal_eval(l_bounds)
			b = literal_eval(u_bounds)
			a = np.array(a)
			b = np.array(b)
			X_design = (b-a)*design.latin_center(1000, len(X_init[0]), seed=314519) + a
		else:
			my_log.write('Initial designs given:\n' + str(initial_x_designs) + '\n')
			X_design = initial_x_designs
			
		my_log.write('Dimensionality of X: ' + str(len(X_init[0])) + '\n')
		my_log.write('Dimensionality of Y: ' + str(len(Y_init[0])) + '\n')
		
		if len(Y_init[0]) <= 2:
			mk_plots = True
		else:
			mk_plots = False	
		
		my_log.write('Creating Pareto model...\n')
		pareto_model = ParetoFront(X_init, Y_init, X_design=X_design, gp_opt_num_restarts=50, verbose=False, max_it=max_it, make_plots=mk_plots, add_at_least=30, get_fig=get_full_fig, fig_prefix=os.path.join(out_dir,'ex1'), Y_true_pareto=None, gp_fixed_noise=None, samp=100, denoised=True)
		my_log.write('Pareto model created...\n')
		Rappture.Utils.progress(20, "Performing optimization algorithm...")
		my_log.write('Starting optimization algorithm...\n')
		pareto_model.optimize_paused()
		response = pareto_model.response
		pareto_data = pareto_model.get_pareto_data()
		designs = pareto_model.get_X_design()
		my_log.write('Optimization finished, saving the model...\n')
		Rappture.Utils.progress(60, "Saving the model...")
		model_file = open('model.obj','wb')
		pkl.dump(pareto_model, model_file, pkl.HIGHEST_PROTOCOL)
		model_file.close()
		my_log.write('Process completed...\n')
		Rappture.Utils.progress(100, "Done...")

	else:
		my_log.write('Incorrect input values, finishing execution...\n')
		response = 'Incorrect tuples for new model'

	return response
Example #4
def new_optimization():
    global inputs, outputs, l_bounds, u_bounds, max_it, x_datalist, y_datalist, csv_valid
    response = None
    my_log.write('New optimization...\n')
    if check_observations():
        Rappture.Utils.progress(10, "New model being created...")
        out_dir = 'surf_test_results_noisy_moo'
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

        if csv_valid:
            X_init = x_datalist
            Y_init = y_datalist
        else:
            X_init = literal_eval(inputs)
            Y_init = literal_eval(outputs)

        X_init = np.array(X_init)
        Y_init = np.array(Y_init)

        a = literal_eval(l_bounds)
        b = literal_eval(u_bounds)
        a = np.array(a)
        b = np.array(b)
        X_design = (b - a) * design.latin_center(1000, 2, seed=314519) + a
        pareto_model = ParetoFront(X_init,
                                   Y_init,
                                   X_design=X_design,
                                   gp_opt_num_restarts=50,
                                   verbose=False,
                                   max_it=max_it,
                                   make_plots=True,
                                   add_at_least=30,
                                   get_fig=get_full_fig,
                                   fig_prefix=os.path.join(out_dir, 'ex1'),
                                   Y_true_pareto=None,
                                   gp_fixed_noise=None,
                                   samp=100,
                                   denoised=True)
        Rappture.Utils.progress(20, "Performing optimization algorithm...")
        pareto_model.optimize_paused()
        response = pareto_model.response
        Rappture.Utils.progress(60, "Saving the model...")
        model_file = open('model.obj', 'wb')
        pkl.dump(pareto_model, model_file, pkl.HIGHEST_PROTOCOL)
        model_file.close()
        Rappture.Utils.progress(100, "Done...")

    else:
        response = 'Incorrect tuples for new model'

    return response
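A pattern that recurs throughout these examples: `design.latin_center` returns points in the unit hyper-cube, which are then mapped affinely onto the box with lower bounds `a` and upper bounds `b`. A minimal standalone sketch of that scaling (the bounds below are invented for illustration):

import numpy as np
import design

# Hypothetical bounds for a two-dimensional input space.
a = np.array([0.0, -1.0])   # lower bounds
b = np.array([2.0, 3.0])    # upper bounds

# Latin-centered design on the unit hyper-cube, scaled onto [a, b].
U = design.latin_center(1000, 2, seed=314519)
X_design = (b - a) * U + a  # column j now lies in [a[j], b[j]]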
Example #5
 def propose_experiment_paused(self, it):
     """
     Optimize the objectives and propose just 1 experiment,
     then save the state of the program.
     """
     if self.verbose:
         print 'step {0:s}'.format(str(it).zfill(len(str(self.max_it))))
         #print '\t> training surrogates'
     #self.train_surrogates()
     # Are we drawing new design points or not?
     if isinstance(self.X_design, int):
         num_design = self.X_design
         X_design = design.latin_center(num_design, self.num_dim)
     else:
         X_design = self.X_design
     if self.verbose:
         print '\t> done'
         print '\t> computing expected improvement'
     ei = self.compute_expected_improvement(X_design)
     if self.verbose:
         print '\t> done'
     i = np.argmax(ei)
     ei_max = ei[i]
     self.ei_values.append(ei_max)
     rel_ei_max = ei_max / self.ei_values[0]
     if self.verbose:
         print '\t> rel_ei_max = {0:1.3f}'.format(rel_ei_max)
     if it >= self.add_at_least and rel_ei_max < self.rtol:
         if self.verbose:
             print '*** Converged (rel_ei_max = {0:1.7f} < rtol = {1:1.2e})'.format(
                 rel_ei_max, self.rtol)
             print '\t> writing final status'
             self.plot_status(it, final=True)
         return
     if self.verbose:
         print '\t> adding design point', i
         print '\t> X_d[i, :]', X_design[i, :]
         print '\t> starting simulation'
     #print self.Y_pareto
     k = self.active_cells
     #for k in k:
     #print k
     lplus = self.active_cells_lplus
     #for lplus in lplus:
     #print lplus
     #y = self.obj_funcs(X_design[i,:])
     #print "Run the experiment/code at the following design"+str(X_design[i,:])
     self.response = "Run the experiment/code at the following design" + str(
         X_design[i, :])
     self.X_design_paused = X_design
     self.i_paused = i
Example #6
def normal_execution():
    #The following three lines are useful only if the code is being used to
    #solve a known problem, like the above.

    #assert len(sys.argv)==3
    #noise = float(sys.argv[1])
    #n = int(sys.argv[2])

    #ObjFunc_noise = ObjFunc(noise,1)
    #ObjFunc_true = ObjFunc(noise,100)

    # The noisy objective function `dtlz1a`.
    # def obj_funcs_noise(x1):
    #     return ObjFunc_noise.__call__(x1)

    # # The sample averaged objective function `dtlz1a`.
    # def obj_funcs_true(x1):
    #     return ObjFunc_true.__call__(x1)

    out_dir = 'surf_test_results_noisy_moo'
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)

    X_init = input('Enter the observed inputs')
    Y_init = input('Enter the observed outputs')

    X_init = np.array(X_init)
    Y_init = np.array(Y_init)

    a = input('Enter the lower bounds of the inputs')
    b = input('Enter the upper bounds of the inputs')
    a = np.array(a)
    b = np.array(b)
    X_design = (b-a)*design.latin_center(1000, 2, seed=314519) + a
    pareto = ParetoFront(X_init, Y_init,
                         X_design=X_design,
                         gp_opt_num_restarts=50,
                         verbose=True,
                         max_it=10,
                         make_plots=True,
                         add_at_least=30,
                         get_fig=get_full_fig,
                         fig_prefix=os.path.join(out_dir,'ex1'),
                         Y_true_pareto=None,
                         gp_fixed_noise=None,
                         samp=100,
                         denoised=True
                         )
    #pareto.optimize()
    pareto.my_optimize()
Example #7
def normal_execution():
    #The following three lines are useful only if the code is being used to
    #solve a known problem, like the above.

    #assert len(sys.argv)==3
    #noise = float(sys.argv[1])
    #n = int(sys.argv[2])

    #ObjFunc_noise = ObjFunc(noise,1)
    #ObjFunc_true = ObjFunc(noise,100)

    # The noisy objective function `dtlz1a`.
    # def obj_funcs_noise(x1):
    #     return ObjFunc_noise.__call__(x1)

    # # The sample averaged objective function `dtlz1a`.
    # def obj_funcs_true(x1):
    #     return ObjFunc_true.__call__(x1)

    out_dir = 'surf_test_results_noisy_moo'
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)

    X_init = input('Enter the observed inputs')
    Y_init = input('Enter the observed outputs')

    X_init = np.array(X_init)
    Y_init = np.array(Y_init)

    a = input('Enter the lower bounds of the inputs')
    b = input('Enter the upper bounds of the inputs')
    a = np.array(a)
    b = np.array(b)
    X_design = (b - a) * design.latin_center(1000, 2, seed=314519) + a
    pareto = ParetoFront(X_init,
                         Y_init,
                         X_design=X_design,
                         gp_opt_num_restarts=50,
                         verbose=True,
                         max_it=10,
                         make_plots=True,
                         add_at_least=30,
                         get_fig=get_full_fig,
                         fig_prefix=os.path.join(out_dir, 'ex1'),
                         Y_true_pareto=None,
                         gp_fixed_noise=None,
                         samp=100,
                         denoised=True)
    #pareto.optimize()
    pareto.my_optimize()
Example #8
 def propose_experiment_paused(self, it):
     """
     Optimize the objectives and propose just 1 experiment,
     then save the state of the program.
     """
     if self.verbose:
         print 'step {0:s}'.format(str(it).zfill(len(str(self.max_it))))
         #print '\t> training surrogates'
     #self.train_surrogates()
     # Are we drawing new design points or not?
     if isinstance(self.X_design, int):
         num_design = self.X_design
         X_design = design.latin_center(num_design, self.num_dim)
     else:
         X_design = self.X_design
     if self.verbose:
         print '\t> done'
         print '\t> computing expected improvement'
     ei = self.compute_expected_improvement(X_design)
     if self.verbose:
         print '\t> done'
     i = np.argmax(ei)
     ei_max = ei[i]
     self.ei_values.append(ei_max)
     rel_ei_max = ei_max / self.ei_values[0]
     if self.verbose:
         print '\t> rel_ei_max = {0:1.3f}'.format(rel_ei_max)
     if it >= self.add_at_least and rel_ei_max < self.rtol:
         if self.verbose:
             print '*** Converged (rel_ei_max = {0:1.7f} < rtol = {1:1.2e})'.format(rel_ei_max, self.rtol)
             print '\t> writing final status'
             self.plot_status(it,final=True)
         return
     if self.verbose:
         print '\t> adding design point', i
         print '\t> X_d[i, :]', X_design[i, :]
         print '\t> starting simulation'
     #print self.Y_pareto
     k = self.active_cells
     #for k in k:
         #print k
     lplus = self.active_cells_lplus
     #for lplus in lplus:
         #print lplus
     #y = self.obj_funcs(X_design[i,:])
     #print "Run the experiment/code at the following design"+str(X_design[i,:])
     self.response = "Run the experiment/code at the following design"+str(X_design[i,:])
     self.X_design_paused = X_design
     self.i_paused = i
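`propose_experiment_paused` stores the proposed design in `X_design_paused` and `i_paused` instead of running the simulation itself. A hedged sketch of how an externally obtained observation might be fed back in, reusing the `add_new_observations` and `train_surrogates` calls that appear in the `optimize` examples further down this page (the function name here is made up):

def resume_with_observation(model, y_observed):
    """Feed the observed objective values for the previously proposed
    design back into a paused model and retrain its surrogates."""
    x_proposed = model.X_design_paused[model.i_paused, :]
    model.add_new_observations(x_proposed, y_observed)
    model.train_surrogates()
    return x_proposed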
Example #9
def new_optimization():
	global inputs, outputs, l_bounds, u_bounds, max_it, x_datalist, y_datalist, csv_valid
	response = None
	my_log.write('New optimization...\n')
	if check_observations():
		Rappture.Utils.progress(10, "New model being created...")
		out_dir = 'surf_test_results_noisy_moo'
		if os.path.isdir(out_dir):
			shutil.rmtree(out_dir)
		os.makedirs(out_dir)

		if csv_valid:
			X_init = x_datalist
			Y_init = y_datalist
		else:
			X_init = literal_eval(inputs)
			Y_init = literal_eval(outputs)
			
		X_init = np.array(X_init)
		Y_init = np.array(Y_init)

		a = literal_eval(l_bounds)
		b = literal_eval(u_bounds)
		a = np.array(a)
		b = np.array(b)
		X_design = (b-a)*design.latin_center(1000, 2, seed=314519) + a
		pareto_model = ParetoFront(X_init, Y_init, X_design=X_design, gp_opt_num_restarts=50, verbose=False, max_it=max_it, make_plots=True, add_at_least=30, get_fig=get_full_fig, fig_prefix=os.path.join(out_dir,'ex1'), Y_true_pareto=None, gp_fixed_noise=None, samp=100, denoised=True)
		Rappture.Utils.progress(20, "Performing optimization algorithm...")
		pareto_model.optimize_paused()
		response = pareto_model.response
		Rappture.Utils.progress(60, "Saving the model...")
		model_file = open('model.obj','wb')
		pkl.dump(pareto_model, model_file, pkl.HIGHEST_PROTOCOL)
		model_file.close()
		Rappture.Utils.progress(100, "Done...")

	else:
		response = 'Incorrect tuples for new model'

	return response
Example #10
 def optimize_step(self, it):
     """
     Perform a single optimization step.
     """
     # Train current model
     self._ei = None
     self._denoised_posterior_samples = None
     self._best_idx = None
     if self.mcmc_start_from_scratch:
         self._model = None
     else:
         self.model.set_XY(self.X, self.Y[:, None])
     if self.renew_design:
         self.X_design = design.latin_center(*self.X_design.shape)
         self.model._X_predict = self.X_design
     if ((it == 0 and self.optimize_model_before_init_mcmc) or
             self.optimize_model_before_mcmc):
         self.model.optimize()
         print str(self.model)
         print self.model.kern.lengthscale
     if self.verbose:
         print '\t> starting mcmc sampling'
     self.model.pymc_mcmc.sample(self.num_mcmc_samples,
                            burn=self.num_mcmc_burn,
                            thin=self.num_mcmc_thin,
                            tune_throughout=self.mcmc_tune_throughout,
                            progress_bar=self.mcmc_progress_bar)
     # Find best expected improvement
     ei = self.ei
     i = np.argmax(ei)
     # Do the simulation and add it
     self.idx_X_obs.append(i)
     self.Y_obs.append(self.func(self.X_design[i], *self.args))
     if self.verbose:
         print '\t> design point id to be added : {0:d}'.format(i)
         print '\t> maximum expected improvement: {0:1.3f}'.format(ei[i])
     return i, ei[i]
Example #11
 def optimize_step(self, it):
     """
     Perform a single optimization step.
     """
     # Train current model
     self._ei = None
     self._denoised_posterior_samples = None
     self._best_idx = None
     if self.mcmc_start_from_scratch:
         self._model = None
     else:
         self.model.set_XY(self.X, self.Y[:, None])
     if self.renew_design:
         self.X_design = design.latin_center(*self.X_design.shape)
         self.model._X_predict = self.X_design
     if ((it == 0 and self.optimize_model_before_init_mcmc)
             or self.optimize_model_before_mcmc):
         self.model.optimize()
         print str(self.model)
         print self.model.kern.lengthscale
     if self.verbose:
         print '\t> starting mcmc sampling'
     self.model.pymc_mcmc.sample(self.num_mcmc_samples,
                                 burn=self.num_mcmc_burn,
                                 thin=self.num_mcmc_thin,
                                 tune_throughout=self.mcmc_tune_throughout,
                                 progress_bar=self.mcmc_progress_bar)
     # Find best expected improvement
     ei = self.ei
     i = np.argmax(ei)
     # Do the simulation and add it
     self.idx_X_obs.append(i)
     self.Y_obs.append(self.func(self.X_design[i], *self.args))
     if self.verbose:
         print '\t> design point id to be added : {0:d}'.format(i)
         print '\t> maximum expected improvement: {0:1.3f}'.format(ei[i])
     return i, ei[i]
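A minimal outer loop around `optimize_step` could look like the sketch below; the stopping rule mirrors the relative expected-improvement check used in the other examples on this page, and the function name and default tolerances are invented for illustration.

def run_optimization(opt, max_it=100, rtol=1e-2):
    """Call optimize_step until max_it steps have been taken or the
    relative expected improvement falls below rtol."""
    ei_values = []
    for it in range(max_it):
        i, ei_max = opt.optimize_step(it)
        ei_values.append(ei_max)
        # Normalize by the first EI value, as in the optimize() examples.
        if ei_max / ei_values[0] < rtol:
            break
    return ei_values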
Example #12
def paused_execution():

    print 'Trying to load previous model...'
    try:
        f = open('model.obj','rb')
        pareto = pkl.load(f)
        f.close()
        pareto.my_optimize_paused()
        print 'Saving model...'
        f = open('model.obj','wb')
        pkl.dump(pareto, f, pkl.HIGHEST_PROTOCOL)
        f.close()
    except IOError:
        print 'New model being created...'
        #The following three lines are useful only if the code is being used to
        #solve a known problem, like the above.

        #assert len(sys.argv)==3
        #noise = float(sys.argv[1])
        #n = int(sys.argv[2])

        #ObjFunc_noise = ObjFunc(noise,1)
        #ObjFunc_true = ObjFunc(noise,100)

        # The noisy objective function `dtlz1a`.
        # def obj_funcs_noise(x1):
        #     return ObjFunc_noise.__call__(x1)

        # # The sample averaged objective function `dtlz1a`.
        # def obj_funcs_true(x1):
        #     return ObjFunc_true.__call__(x1)

        out_dir = 'surf_test_results_noisy_moo'
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

        X_init = input('Enter the observed inputs')
        Y_init = input('Enter the observed outputs')

        X_init = np.array(X_init)
        Y_init = np.array(Y_init)

        a = input('Enter the lower bounds of the inputs')
        b = input('Enter the upper bounds of the inputs')
        a = np.array(a)
        b = np.array(b)
        X_design = (b-a)*design.latin_center(1000, 2, seed=314519) + a

        pareto = ParetoFront(X_init, Y_init,
                             X_design=X_design,
                             gp_opt_num_restarts=50,
                             verbose=True,
                             max_it=10,
                             make_plots=True,
                             add_at_least=30,
                             get_fig=get_full_fig,
                             fig_prefix=os.path.join(out_dir,'ex1'),
                             Y_true_pareto=None,
                             gp_fixed_noise=None,
                             samp=100,
                             denoised=True
                             )
        pareto.my_optimize_paused()
        print 'Saving model...'
        f = open('model.obj','wb')
        pkl.dump(pareto, f, pkl.HIGHEST_PROTOCOL)
        f.close()
Example #13
    n = int(sys.argv[2])

    ObjFunc_noise = ObjFunc(noise,noise,1)
    ObjFunc_true = ObjFunc(noise,noise,100)
    
    # The objective function `dtlz1a`.
    def obj_funcs_noise(x1):
        return ObjFunc_noise.__call__(x1)

    def obj_funcs_true(x1):
        return ObjFunc_true.__call__(x1)
    out_dir = 'ex1_results_n={0:d}_sigma={1:s}'.format(n,sys.argv[1])
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)
    X_init = design.latin_center(n, 6, seed=1234)
    Y_init = np.array([obj_funcs_noise(x) for x in X_init])
    X_d_for_true = design.latin_center(10000, 6, seed=23415)
    X_design = design.latin_center(100, 6, seed=314519)
    Y_true = np.array([obj_funcs_true(x) for x in X_d_for_true])
    #Y_true = np.load('true_data.npy')
    pareto = ParetoFront(X_init, Y_init, obj_funcs_noise, obj_funcs_true,
                         X_design=1000,
                         gp_opt_num_restarts=20,
                         verbose=True,
                         max_it = 100,
                         make_plots=True,
                         add_at_least=30,
                         get_fig=get_full_fig,
                         fig_prefix=os.path.join(out_dir,'ex1'),
                         Y_true_pareto=Y_true,
"""
Construct a centered Latin Square design.

Author:
    Ilias Bilionis

Date:
    3/19/2014

"""


import design
import matplotlib.pyplot as plt

# The number of input dimensions
num_dim = 2
# The number of points you want
num_points = 10
# Create the design
X = design.latin_center(num_points, num_dim)
# Look at it
print X
# And plot it
plt.plot(X[:, 0], X[:, 1], '.', markersize=10)
plt.xlabel('$x_1$', fontsize=16)
plt.ylabel('$x_2$', fontsize=16)
plt.title('Centered Latin Square Design', fontsize=16)
plt.show()
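If `latin_center` follows the usual centered Latin hypercube construction, every one-dimensional projection of the design places exactly one point in each of `num_points` equal bins. A quick check of that property, using only the calls shown above:

import numpy as np
import design

num_points = 10
X = design.latin_center(num_points, 2)
for j in range(X.shape[1]):
    # Bin index of each point along dimension j; every index in
    # 0..num_points-1 should appear exactly once.
    bins = np.floor(X[:, j] * num_points).astype(int)
    print sorted(bins)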
Example #15
 def optimize(self):
     """
     Optimize the objectives, i.e., discover the Pareto front.
     """
     self.ei_values = []
     for it in xrange(self.max_it):
         if self.verbose:
             print 'step {0:s}'.format(str(it).zfill(len(str(self.max_it))))
             #print '\t> training surrogates'
         #self.train_surrogates()
         # Are we drawing new design points or not?
         if isinstance(self.X_design, int):
             num_design = self.X_design
             X_design = design.latin_center(num_design, self.num_dim)
         else:
             X_design = self.X_design
         if self.verbose:
             print '\t> done'
             print '\t> computing expected improvement'
         ei = self.compute_expected_improvement(X_design)
         if self.verbose:
             print '\t> done'
         i = np.argmax(ei)
         ei_max = ei[i]
         self.ei_values.append(ei_max)
         rel_ei_max = ei_max / self.ei_values[0]
         if self.verbose:
             print '\t> rel_ei_max = {0:1.3f}'.format(rel_ei_max)
         if it >= self.add_at_least and rel_ei_max < self.rtol:
             if self.verbose:
                 print '*** Converged (rel_ei_max = {0:1.7f} < rtol = {1:1.2e})'.format(rel_ei_max, self.rtol)
                 print '\t> writing final status'
                 self.plot_status(it,final=True)
             break
         if self.verbose:
             print '\t> adding design point', i
             print '\t> X_d[i, :]', X_design[i, :]
             print '\t> starting simulation'
         #print self.Y_pareto
         k = self.active_cells
         #for k in k:
             #print k
         lplus = self.active_cells_lplus
         #for lplus in lplus:
             #print lplus
         #y = self.obj_funcs(X_design[i,:])
         print "Run the experiment/code at the following design"+str(X_design[i,:])
         y = input('Enter the observed value at the new design')
         self.add_new_observations(X_design[i, :], y)
         if self.verbose:
             print '\t> training surrogates now'
         self.train_surrogates()
         self.Y_p = self.get_projected_observations()
         self.idx = get_idx_of_observed_pareto_front(self.Y_p)
         self.b = compute_sorted_list_of_pareto_points(self.Y_pareto, self.y_ref)
         #self.Y_true_noiseless = np.array([self.obj_funcs_true(x) for x in self.X])
         if self.verbose:
             print '\t> done'
         if not isinstance(self.X_design, int):
             self.X_design = np.delete(self.X_design, i, 0)
         if self.make_plots:
             if it==(self.max_it-1):
                 self.plot_status(it,final=True)
             else:
                 self.plot_status(it)
Example #16
        self.sigma = sigma

    def __call__(self, x):
        return self.f1(x), self.f2(x)


if __name__ == '__main__':
    noise = float(sys.argv[1])
    ObjFunc_noise = ObjFunc(noise, 1)

    # The objective function from `Binois et al`.
    def obj_funcs_noise(x):
        return ObjFunc_noise.__call__(x)

    if sys.argv[2].isdigit():
        x = design.latin_center(int(sys.argv[2]), 2)
        x_string_array = str(x).split('\n')
        inp = open('x_2.csv', 'wb')
        for i, string in enumerate(x_string_array):
            n_string = string.replace('[', '')
            n_string = n_string.replace(']', '')
            n_string = n_string.replace(' ', '')
            if i < len(x_string_array) - 1:
                inp.write(n_string[:4] + ',' + n_string[4:len(n_string)] +
                          '\n')
            else:
                inp.write(n_string[:4] + ',' + n_string[4:len(n_string)])
        inp.close()

        print 'the inputs are' + str(x)
        y = np.array([obj_funcs_noise(x) for x in x])
Example #17
print '+ making', out_dir
os.makedirs(out_dir)

# We are looking for the minimum over these points
# Use something like the following for a generic problem
#X_design = domain[:, 0] + (domain[:, 1] - domain[:, 0]) * design.latin_center(num_design, 2) 
# For this one we use a regular grid only because we want to do some contour
# plots
x1 = np.linspace(domain[0, 0], domain[0, 1], num_design)
x2 = np.linspace(domain[1, 0], domain[1, 1], num_design)
X1, X2 = np.meshgrid(x1, x2)
X_design = np.hstack([X1.flatten()[:, None], X2.flatten()[:, None]])

# The initial points to start from
X_init = np.random.rand(num_init)[:, None] * 6.
X_init = domain[:, 0] + (domain[:, 1] - domain[:, 0]) * design.latin_center(num_init, 2) 

# Globally minimize f
x, y, ei, _ = pybgo.minimize(f, X_init, X_design, tol=1e-5,
                          callback=pybgo.plot_summary_2d,      # This plots the results
                                                            # at each iteration of
                                                            # the algorithm
                          prefix=os.path.join(out_dir, 'out'),
                          save_model=True)

# The best value at each iteration
bv = np.array([y[:i, 0].min() for i in xrange(1, y.shape[0])])

fig, ax = plt.subplots()
it = np.arange(1, bv.shape[0] + 1)
ax.plot(it, bv, linewidth=2)
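The best-so-far values collected in `bv` above can also be computed without the Python loop; a small equivalent sketch with placeholder data:

import numpy as np

y = np.random.rand(20, 1)  # stands in for the optimizer output above
bv_loop = np.array([y[:i, 0].min() for i in xrange(1, y.shape[0])])
bv_vec = np.minimum.accumulate(y[:-1, 0])  # running minimum, same values
assert np.allclose(bv_loop, bv_vec)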
Example #18
        self.n_samp = n_samp
        self.sigma = sigma

    def __call__(self,x):
        return self.f1(x), self.f2(x)

if __name__ == '__main__':
    noise = float(sys.argv[1])
    ObjFunc_noise = ObjFunc(noise,1)

    # The objective function from `Binois et al`.
    def obj_funcs_noise(x):
        return ObjFunc_noise.__call__(x)

    if sys.argv[2].isdigit():
        x = design.latin_center(int(sys.argv[2]), 2)
        x_string_array = str(x).split('\n')
        inp = open('x_2.csv','wb')
        for i, string in enumerate(x_string_array):
            n_string = string.replace('[','')
            n_string = n_string.replace(']','')
            n_string = n_string.replace(' ','')
            if i < len(x_string_array)-1:
                inp.write(n_string[:4] + ',' + n_string[4:len(n_string)] + '\n')
            else:
                inp.write(n_string[:4] + ',' + n_string[4:len(n_string)])
        inp.close()

        print 'the inputs are'+ str(x)
        y = np.array([obj_funcs_noise(x) for x in x])
Example #19
import math
import GPy
import matplotlib.pyplot as plt
import seaborn as sns
from design import latin_center


def f(x, sigma=0.):
    """
    A 1D function to look at.
    """
    return 4. * (1. - np.exp(-0.1 * x) * np.sin((x + 8. * np.exp(x - 7.)))) + \
           sigma * np.random.randn(*x.shape)


np.random.seed(3145252)

# The objective
objective = lambda (x): f(x, sigma=1.5)

# Just for plotting
X_design = np.linspace(0, 6., 100)[:, None]

# The initial points to start from
X_init = latin_center(5, 1) * 6.

Y_init = f(X_init)

optimizer = pybgo.GlobalOptimizer(X_init, X_design, objective, true_func=f)
optimizer.optimize()
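Before calling `optimize` it can help to look at what the optimizer is up against; the sketch below plots the noiseless objective against noisy draws on the same plotting grid, reusing only pieces already defined in this example:

import numpy as np
import matplotlib.pyplot as plt

def f(x, sigma=0.):
    return 4. * (1. - np.exp(-0.1 * x) * np.sin(x + 8. * np.exp(x - 7.))) + \
           sigma * np.random.randn(*x.shape)

X_design = np.linspace(0, 6., 100)[:, None]
plt.plot(X_design, f(X_design), label='noiseless objective')
plt.plot(X_design, f(X_design, sigma=1.5), '.', label='noisy samples')
plt.legend()
plt.show()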
Example #20
def new_optimization():
    global inputs, outputs, l_bounds, u_bounds, max_it, x_datalist, y_datalist, csv_valid, pareto_data, designs, bounds, initial_x_designs, mk_plots
    response = None
    my_log.write('New optimization...\n')
    if check_observations():
        Rappture.Utils.progress(10, "New model being created...")
        out_dir = 'surf_test_results_noisy_moo'
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

        X_init = np.array(x_datalist)
        Y_init = np.array(y_datalist)

        if bounds:
            my_log.write('Bounds given...\n')
            a = literal_eval(l_bounds)
            b = literal_eval(u_bounds)
            a = np.array(a)
            b = np.array(b)
            X_design = (b - a) * design.latin_center(
                1000, len(X_init[0]), seed=314519) + a
        else:
            my_log.write('Initial designs given:\n' + str(initial_x_designs) +
                         '\n')
            X_design = initial_x_designs

        my_log.write('Dimensionality of X: ' + str(len(X_init[0])) + '\n')
        my_log.write('Dimensionality of Y: ' + str(len(Y_init[0])) + '\n')

        if len(Y_init[0]) <= 2:
            mk_plots = True
        else:
            mk_plots = False

        my_log.write('Creating Pareto model...\n')
        pareto_model = ParetoFront(X_init,
                                   Y_init,
                                   X_design=X_design,
                                   gp_opt_num_restarts=50,
                                   verbose=False,
                                   max_it=max_it,
                                   make_plots=mk_plots,
                                   add_at_least=30,
                                   get_fig=get_full_fig,
                                   fig_prefix=os.path.join(out_dir, 'ex1'),
                                   Y_true_pareto=None,
                                   gp_fixed_noise=None,
                                   samp=100,
                                   denoised=True)
        my_log.write('Pareto model created...\n')
        Rappture.Utils.progress(20, "Performing optimization algorithm...")
        my_log.write('Starting optimization algorithm...\n')
        pareto_model.optimize_paused()
        response = pareto_model.response
        pareto_data = pareto_model.get_pareto_data()
        designs = pareto_model.get_X_design()
        my_log.write('Optimization finished, saving the model...\n')
        Rappture.Utils.progress(60, "Saving the model...")
        model_file = open('model.obj', 'wb')
        pkl.dump(pareto_model, model_file, pkl.HIGHEST_PROTOCOL)
        model_file.close()
        my_log.write('Process completed...\n')
        Rappture.Utils.progress(100, "Done...")

    else:
        my_log.write('Incorrect input values, finishing execution...\n')
        response = 'Incorrect tuples for new model'

    return response
Example #21
def paused_execution():

    print 'Trying to load previous model...'
    try:
        f = open('model.obj', 'rb')
        pareto = pkl.load(f)
        f.close()
        pareto.my_optimize_paused()
        print 'Saving model...'
        f = open('model.obj', 'wb')
        pkl.dump(pareto, f, pkl.HIGHEST_PROTOCOL)
        f.close()
    except IOError:
        print 'New model being created...'
        #The following three lines are useful only if the code is being used to
        #solve a known problem, like the above.

        #assert len(sys.argv)==3
        #noise = float(sys.argv[1])
        #n = int(sys.argv[2])

        #ObjFunc_noise = ObjFunc(noise,1)
        #ObjFunc_true = ObjFunc(noise,100)

        # The noisy objective function `dtlz1a`.
        # def obj_funcs_noise(x1):
        #     return ObjFunc_noise.__call__(x1)

        # # The sample averaged objective function `dtlz1a`.
        # def obj_funcs_true(x1):
        #     return ObjFunc_true.__call__(x1)

        out_dir = 'surf_test_results_noisy_moo'
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

        X_init = input('Enter the observed inputs')
        Y_init = input('Enter the observed outputs')

        X_init = np.array(X_init)
        Y_init = np.array(Y_init)

        a = input('Enter the lower bounds of the inputs')
        b = input('Enter the upper bounds of the inputs')
        a = np.array(a)
        b = np.array(b)
        X_design = (b - a) * design.latin_center(1000, 2, seed=314519) + a

        pareto = ParetoFront(X_init,
                             Y_init,
                             X_design=X_design,
                             gp_opt_num_restarts=50,
                             verbose=True,
                             max_it=10,
                             make_plots=True,
                             add_at_least=30,
                             get_fig=get_full_fig,
                             fig_prefix=os.path.join(out_dir, 'ex1'),
                             Y_true_pareto=None,
                             gp_fixed_noise=None,
                             samp=100,
                             denoised=True)
        pareto.my_optimize_paused()
        print 'Saving model...'
        f = open('model.obj', 'wb')
        pkl.dump(pareto, f, pkl.HIGHEST_PROTOCOL)
        f.close()
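The pickle round-trip above can also be written with context managers, so the file handles are closed even if `dump` or `load` raises; a small equivalent sketch, assuming `pkl` is the standard `pickle` module as elsewhere on this page:

import pickle as pkl

def save_model(model, path='model.obj'):
    # Same as the open/dump/close sequence above, with automatic cleanup.
    with open(path, 'wb') as f:
        pkl.dump(model, f, pkl.HIGHEST_PROTOCOL)

def load_model(path='model.obj'):
    with open(path, 'rb') as f:
        return pkl.load(f)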
Example #22
 def optimize(self):
     """
     Optimize the objectives, i.e., discover the Pareto front.
     """
     self.ei_values = []
     for it in xrange(self.max_it):
         if self.verbose:
             print 'step {0:s}'.format(str(it).zfill(len(str(self.max_it))))
             #print '\t> training surrogates'
         #self.train_surrogates()
         # Are we drawing new design points or not?
         if isinstance(self.X_design, int):
             num_design = self.X_design
             X_design = design.latin_center(num_design, self.num_dim)
         else:
             X_design = self.X_design
         if self.verbose:
             print '\t> done'
             print '\t> computing expected improvement'
         ei = self.compute_expected_improvement(X_design)
         if self.verbose:
             print '\t> done'
         i = np.argmax(ei)
         ei_max = ei[i]
         self.ei_values.append(ei_max)
         rel_ei_max = ei_max / self.ei_values[0]
         if self.verbose:
             print '\t> rel_ei_max = {0:1.3f}'.format(rel_ei_max)
         if it >= self.add_at_least and rel_ei_max < self.rtol:
             if self.verbose:
                 print '*** Converged (rel_ei_max = {0:1.7f} < rtol = {1:1.2e})'.format(
                     rel_ei_max, self.rtol)
                 print '\t> writing final status'
                 self.plot_status(it, final=True)
             break
         if self.verbose:
             print '\t> adding design point', i
             print '\t> X_d[i, :]', X_design[i, :]
             print '\t> starting simulation'
         #print self.Y_pareto
         k = self.active_cells
         #for k in k:
         #print k
         lplus = self.active_cells_lplus
         #for lplus in lplus:
         #print lplus
         #y = self.obj_funcs(X_design[i,:])
         print "Run the experiment/code at the following design" + str(
             X_design[i, :])
         y = input('Enter the observed value at the new design')
         self.add_new_observations(X_design[i, :], y)
         if self.verbose:
             print '\t> training surrogates now'
         self.train_surrogates()
         self.Y_p = self.get_projected_observations()
         self.idx = get_idx_of_observed_pareto_front(self.Y_p)
         self.b = compute_sorted_list_of_pareto_points(
             self.Y_pareto, self.y_ref)
         #self.Y_true_noiseless = np.array([self.obj_funcs_true(x) for x in self.X])
         if self.verbose:
             print '\t> done'
         if not isinstance(self.X_design, int):
             self.X_design = np.delete(self.X_design, i, 0)
         if self.make_plots:
             if it == (self.max_it - 1):
                 self.plot_status(it, final=True)
             else:
                 self.plot_status(it)
Example #23
 out_dir = 'surf_test_results_noisy_moo'
 if os.path.isdir(out_dir):
     shutil.rmtree(out_dir)
 os.makedirs(out_dir)
 
 X_init = input('Enter the observed inputs')
 Y_init = input('Enter the observed outputs')
 
 X_init = np.array(X_init)
 Y_init = np.array(Y_init)
 
 a = input('Enter the lower bounds of the inputs')
 b = input('Enter the upper bounds of the inputs')
 a = np.array(a)
 b = np.array(b)
 X_design = (b-a)*design.latin_center(1000, 2, seed=314519) + a
 pareto = ParetoFront(X_init, Y_init,
                      X_design=X_design,
                      gp_opt_num_restarts=50,
                      verbose=True,
                      max_it=10,
                      make_plots=True,
                      add_at_least=30,
                      get_fig=get_full_fig,
                      fig_prefix=os.path.join(out_dir,'ex1'),
                      Y_true_pareto=None,
                      gp_fixed_noise=None,
                      samp=1000,
                      denoised=True
                      )     
 pareto.optimize()
Example #24
    ObjFunc_noise = ObjFunc(noise, noise, 1)
    ObjFunc_true = ObjFunc(noise, noise, 100)

    # The objective function `dtlz1a`.
    def obj_funcs_noise(x1):
        return ObjFunc_noise.__call__(x1)

    def obj_funcs_true(x1):
        return ObjFunc_true.__call__(x1)

    out_dir = 'ex1_results_n={0:d}_sigma={1:s}'.format(n, sys.argv[1])
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)
    X_init = design.latin_center(n, 6, seed=1234)
    Y_init = np.array([obj_funcs_noise(x) for x in X_init])
    X_d_for_true = design.latin_center(10000, 6, seed=23415)
    X_design = design.latin_center(100, 6, seed=314519)
    Y_true = np.array([obj_funcs_true(x) for x in X_d_for_true])
    #Y_true = np.load('true_data.npy')
    pareto = ParetoFront(X_init,
                         Y_init,
                         obj_funcs_noise,
                         obj_funcs_true,
                         X_design=1000,
                         gp_opt_num_restarts=20,
                         verbose=True,
                         max_it=100,
                         make_plots=True,
                         add_at_least=30,
Example #25
print '+ making', out_dir
os.makedirs(out_dir)

# We are looking for the minimum over these points
# Use something like the following for a generic problem
#X_design = domain[:, 0] + (domain[:, 1] - domain[:, 0]) * design.latin_center(num_design, 2)
# For this one we use a regular grid only because we want to do some contour
# plots
x1 = np.linspace(domain[0, 0], domain[0, 1], num_design)
x2 = np.linspace(domain[1, 0], domain[1, 1], num_design)
X1, X2 = np.meshgrid(x1, x2)
X_design = np.hstack([X1.flatten()[:, None], X2.flatten()[:, None]])

# The initial points to start from
X_init = np.random.rand(num_init)[:, None] * 6.
X_init = domain[:, 0] + (domain[:, 1] - domain[:, 0]) * design.latin_center(
    num_init, 2)

# Globally minimize f
x, y, ei, _ = pybgo.minimize(
    f,
    X_init,
    X_design,
    tol=1e-5,
    callback=pybgo.plot_summary_2d,  # This plots the results
    # at each iteration of
    # the algorithm
    prefix=os.path.join(out_dir, 'out'),
    save_model=True)

# The best value at each iteration
bv = np.array([y[:i, 0].min() for i in xrange(1, y.shape[0])])
Example #26
import math
import GPy
import matplotlib.pyplot as plt
import seaborn as sns
from design import latin_center


def f(x, sigma=0.):
    """
    A 1D function to look at.
    """
    return 4. * (1. - np.exp(-0.1 * x) * np.sin((x + 8. * np.exp(x - 7.)))) + \
           sigma * np.random.randn(*x.shape)

np.random.seed(3145252)

# The objective
objective = lambda (x): f(x, sigma=1.5)

# Just for plotting
X_design = np.linspace(0, 6., 100)[:, None]

# The initial points to start from
X_init = latin_center(5, 1) * 6.

Y_init = f(X_init)

optimizer = pybgo.GlobalOptimizer(X_init, X_design, objective,
                                  true_func=f)
optimizer.optimize()
Example #27
"""
Construct a centered Latin Square design.

Author:
    Ilias Bilionis

Date:
    3/19/2014

"""

import design
import matplotlib.pyplot as plt

# The number of input dimensions
num_dim = 2
# The number of points you want
num_points = 10
# Create the design
X = design.latin_center(num_points, num_dim)
# Look at it
print X
# And plot it
plt.plot(X[:, 0], X[:, 1], '.', markersize=10)
plt.xlabel('$x_1$', fontsize=16)
plt.ylabel('$x_2$', fontsize=16)
plt.title('Centered Latin Square Design', fontsize=16)
plt.show()
Example #28
    out_dir = 'surf_test_results_noisy_moo'
    if os.path.isdir(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)

    X_init = input('Enter the observed inputs')
    Y_init = input('Enter the observed outputs')

    X_init = np.array(X_init)
    Y_init = np.array(Y_init)

    a = input('Enter the lower bounds of the inputs')
    b = input('Enter the upper bounds of the inputs')
    a = np.array(a)
    b = np.array(b)
    X_design = (b - a) * design.latin_center(1000, 2, seed=314519) + a
    pareto = ParetoFront(X_init,
                         Y_init,
                         X_design=X_design,
                         gp_opt_num_restarts=50,
                         verbose=True,
                         max_it=10,
                         make_plots=True,
                         add_at_least=30,
                         get_fig=get_full_fig,
                         fig_prefix=os.path.join(out_dir, 'ex1'),
                         Y_true_pareto=None,
                         gp_fixed_noise=None,
                         samp=1000,
                         denoised=True)
    pareto.optimize()