Example #1
    Y = r.multivariate_normal(mean, cov)
    Y = np.reshape(Y, (8**d, 1))
    print(Y[:5, 0])
    aux_model.append(GPy.models.GPRegression(grid, Y, kernel, noise_var=1e-10))


def h(X):
    X = np.atleast_2d(X)
    hX = np.empty((m, X.shape[0]))
    for j in range(m):
        hX[j, :] = aux_model[j].posterior_mean(X)[:, 0]
    return hX


# --- Objective
objective = MultiObjective(h, as_list=False, output_dim=m)

# --- Space
space = GPyOpt.Design_space(space=[{
    'name': 'var',
    'type': 'continuous',
    'domain': (0, 1),
    'dimensionality': d
}])

# --- Model (Multi-output GP)
n_attributes = m
model = multi_outputGP(output_dim=n_attributes,
                       exact_feval=[True] * m,
                       fixed_hyps=False)
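
# --- Side note (not part of the original script): a minimal, self-contained
# sketch of how auxiliary GPs like the ones above can be built, i.e. sampling a
# function from a GP prior on a grid and fitting a near-noiseless GPRegression
# to that sample. The toy values of d, m, grid and kernel below are assumptions.
import numpy as np
import GPy

d_demo, m_demo = 1, 2
grid_demo = np.linspace(0, 1, 8 ** d_demo)[:, None]
kernel_demo = GPy.kern.RBF(input_dim=d_demo, variance=1.0, lengthscale=0.1)

aux_model_sketch = []
rng = np.random.RandomState(0)
cov_demo = kernel_demo.K(grid_demo, grid_demo)
for _ in range(m_demo):
    # one sample from the GP prior over the grid, reshaped to a column vector
    Y_demo = rng.multivariate_normal(np.zeros(grid_demo.shape[0]), cov_demo).reshape(-1, 1)
    aux_model_sketch.append(
        GPy.models.GPRegression(grid_demo, Y_demo, kernel_demo.copy(), noise_var=1e-10))
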
Example #2
def function_caller_NN(rep):

    np.random.seed(rep)
    # func2 = dropwave()
    function_rejected = True
    s = 0
    print(os.getcwd())
    while function_rejected or s <= 1:
        #for i in range(2):
        try:
            RMITD_f = FC_NN_test_function()
            function_rejected = False
            s += 1
        except Exception:
            function_rejected = True
            print("function_rejected check path inside function")
    # --- Attributes
    #repeat same objective function to solve a 1 objective problem
    f = MultiObjective([RMITD_f.f])
    c = MultiObjective([RMITD_f.c])

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem

    #c2 = MultiObjective([test_c2])
    # --- Space
    #define space of variables
    space = GPyOpt.Design_space(
        space=[{
            'name': 'var_1',
            'type': 'continuous',
            'domain': (0.0, 0.99)
        }, {
            'name': 'var_2',
            'type': 'continuous',
            'domain': (0.0, 0.99)
        }, {
            'name': 'var_3',
            'type': 'continuous',
            'domain': (5, 12)
        }, {
            'name': 'var_4',
            'type': 'continuous',
            'domain': (5, 12)
        }]
    )  #GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (0,100)}])#
    n_f = 1
    n_c = 1
    noise = 0.002**2
    model_f = multi_outputGP(output_dim=n_f,
                             noise_var=[noise] * n_f,
                             exact_feval=[True] * n_f)
    model_c = multi_outputGP(output_dim=n_c,
                             noise_var=[1e-21] * n_c,
                             exact_feval=[True] * n_c)

    # --- Acquisition optimizer
    #optimizer for inner acquisition function
    acq_opt = GPyOpt.optimization.AcquisitionOptimizer(optimizer='lbfgs',
                                                       space=space,
                                                       model=model_f,
                                                       model_c=model_c)
    #
    # # --- Initial design
    #initial design
    init_num_samples = 18
    initial_design = GPyOpt.experiment_design.initial_design(
        'latin', space, init_num_samples)

    nz = 1
    acquisition = KG(model=model_f,
                     model_c=model_c,
                     space=space,
                     nz=nz,
                     optimizer=acq_opt)
    evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
    bo = BO(model_f,
            model_c,
            space,
            f,
            c,
            acquisition,
            evaluator,
            initial_design,
            expensive=True,
            deterministic=False)

    max_iter = 30
    # print("Finished Initialization")
    X, Y, C, Opportunity_cost = bo.run_optimization(max_iter=max_iter,
                                                    verbosity=False)
    print("Code Ended")

    C_bool = np.prod(np.concatenate(C, axis=1) < 0, axis=1)
    data = {}
    print("C", C)
    print("np.array(Opportunity_cost).reshape(-1)",
          np.array(Opportunity_cost).reshape(-1))
    print("np.array(Y).reshape(-1)", np.array(Y).reshape(-1))
    print("np.array(C_bool).reshape(-1)", np.array(C_bool).reshape(-1))
    data["Opportunity_cost"] = np.concatenate(
        (np.zeros(init_num_samples), np.array(Opportunity_cost).reshape(-1)))
    data["Y"] = np.array(Y).reshape(-1)
    data["C_bool"] = np.array(C_bool).reshape(-1)

    gen_file = pd.DataFrame.from_dict(data)
    folder = "RESULTS"
    subfolder = "NN_KG"
    cwd = os.getcwd()
    print("cwd", cwd)
    path = cwd + "/" + folder + "/" + subfolder + '/it_' + str(rep) + '.csv'
    if not os.path.isdir(cwd + "/" + folder + "/" + subfolder):
        os.makedirs(cwd + "/" + folder + "/" + subfolder)

    gen_file.to_csv(path_or_buf=path)

    print("X", X, "Y", Y, "C", C)
def HOLE_function_caller_test(rep):

    penalty = 0
    noise = 1e-6
    alpha = 1.95
    np.random.seed(rep)

    folder = "RESULTS"
    subfolder = "HOLE_ParEGO_utilityDM_Lin_utilityAlg_Tche"
    cwd = os.getcwd()
    path = cwd + "/" + folder + "/"+subfolder

    # func2 = dropwave()
    POL_func= HOLE(sd=np.sqrt(noise))
    ref_point = POL_func.ref_point

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem
    f = MultiObjective([POL_func.f1, POL_func.f2])
    # c = MultiObjective([POL_func.c1, POL_func.c2])

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem

    #c2 = MultiObjective([test_c2])
    # --- Space
    #define space of variables
    space =  GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (-1.0, 1.0)},{'name': 'var_2', 'type': 'continuous', 'domain': (-1.0, 1.0)}])#GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (0,100)}])#

    n_f = 1
    n_c = 0
    input_d = 2
    m =2


    model_f = multi_outputGP(output_dim = n_f,   noise_var=[noise]*n_f, exact_feval=[True]*n_f)
    #model_c = multi_outputGP(output_dim = n_c,  noise_var=[1e-7]*n_c, exact_feval=[True]*n_c)

    # --- Acquisition optimizer
    #optimizer for inner acquisition function
    acq_opt = GPyOpt.optimization.AcquisitionOptimizer(optimizer='lbfgs', inner_optimizer='Nelder_Mead',space=space, model=model_f, model_c=None)


    # --- Initial design
    #initial design
    initial_design = GPyOpt.experiment_design.initial_design('latin', space, 2*(input_d+1))

    # --- Utility function
    def prior_sample_generator(n_samples=1, seed=None):
        if seed is None:

            samples = np.random.dirichlet(np.ones((m,)), n_samples)
            print("samples", samples)

        else:
            random_state = np.random.RandomState(seed)
            samples = random_state.dirichlet(np.ones((m,)), n_samples)
            print("samples", samples)

        return samples

    def prior_density(x):
        assert x.shape[1] == m, "wrong dimension"
        output = np.zeros(x.shape[0])
        for i in range(len(output)):
            output[i] = dirichlet.pdf(x=x[i], alpha=np.ones((m,)))
        return output.reshape(-1)

    def U_func(parameter, y):
        w = parameter
        scaled_vectors = np.multiply(w, y)
        utility = np.max(scaled_vectors, axis=1)
        utility = np.atleast_2d(utility)
        print("utility", utility.T)
        return utility.T


    def dU_func(parameter, y):
        raise NotImplementedError("gradient of the Tchebycheff utility is not provided")

    ##### Utility
    n_samples = 1
    support = prior_sample_generator(n_samples=n_samples)  # generates the support to marginalise the parameters for acquisition function inside the optimisation process. E_theta[EI(x)]

    prob_dist = prior_density(support)  # generates the initial density given the support
    prob_dist /= np.sum(prob_dist)
    parameter_distribution = ParameterDistribution(continuous=True,support=support, prob_dist=prob_dist, sample_generator=prior_sample_generator)


    U = Utility(func=U_func, dfunc=dU_func, parameter_dist=parameter_distribution, linear=True)


    #acquisition = HVI(model=model_f, model_c=model_c , alpha=alpha, space=space, optimizer = acq_opt)
    acquisition = ParEGO(model=model_f, model_c=None , alpha=alpha, space=space, NSGA_based=False,optimizer = acq_opt, utility= U, true_func=f)


    last_step_acquisition = Last_Step(model_f=model_f, model_c=None , true_f=f, true_c=None,n_f=m, n_c=n_c, B=1,acquisition_optimiser = acq_opt, acquisition_f=acquisition,seed=rep,prior_gen=prior_sample_generator, space=space, path=path)

    evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
    bo = BO(model_f, None, space, f, None, acquisition, evaluator, initial_design,  ref_point=ref_point)



    # print("Finished Initialization")
    X, Y, C, Opportunity_cost = bo.run_optimization(max_iter =100,  rep=rep, last_step_evaluator=last_step_acquisition, path=path, verbosity=False)
    print("Code Ended")

    # data = {}
    # data["Opportunity_cost"] = np.array(Opportunity_cost).reshape(-1)
    #
    # gen_file = pd.DataFrame.from_dict(data)
    # folder = "RESULTS"
    # subfolder = "DEB_HVI_"
    # cwd = os.getcwd()
    # print("cwd", cwd)
    # path = cwd + "/" + folder +"/"+ subfolder +'/it_' + str(rep)+ '.csv'
    # if os.path.isdir(cwd + "/" + folder +"/"+ subfolder) == False:
    #     os.makedirs(cwd + "/" + folder +"/"+ subfolder)
    #
    # gen_file.to_csv(path_or_buf=path)

    print("X",X,"Y",Y, "C", C)
import GPyOpt
import GPy
from multi_objective import MultiObjective
from multi_outputGP import multi_outputGP
from maKG import maKG
from maEI import maEI
from general import unif_2d
from parameter_distribution import ParameterDistribution
from utility import Utility
import ma_bo

# --- Function to optimize
func = GPyOpt.objective_examples.experiments2d.branin()

# --- Attributes
noise_var = [1., 1.]
f = MultiObjective([func.f, func.f], noise_var=noise_var)

# --- Space
space = GPyOpt.Design_space(space=[{
    'name': 'var_1',
    'type': 'continuous',
    'domain': (-5, 10)
}, {
    'name': 'var_2',
    'type': 'continuous',
    'domain': (1, 15)
}])

# --- Model (Multi-output GP)
n_a = 2
model = multi_outputGP(output_dim=n_a, noise_var=noise_var)
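
# Quick sanity check of the underlying objective (not part of the original
# snippet): GPyOpt's Branin example evaluated at one of its known minimisers,
# (pi, 2.275), should return a value close to the global minimum of about 0.3979.
import numpy as np
print(func.f(np.array([[np.pi, 2.275]])))
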
def function_caller_test_function_2_penalty(rep):
    for penalty in [-999, 4]:

        noise = 1e-6
        np.random.seed(rep)

        # func2 = dropwave()
        mistery_f =new_brannin(sd=np.sqrt(noise))

        # --- Attributes
        #repeat same objective function to solve a 1 objective problem
        f = MultiObjective([mistery_f.f, mistery_f.f])
        c = MultiObjective([mistery_f.c])

        # --- Attributes
        #repeat same objective function to solve a 1 objective problem

        #c2 = MultiObjective([test_c2])
        # --- Space
        #define space of variables
        space =  GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (-5,10)},{'name': 'var_2', 'type': 'continuous', 'domain': (0,15)}])#GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (0,100)}])#
        n_f = 1
        n_c = 1
        model_f = multi_outputGP(output_dim = n_f,   noise_var=[noise]*n_f, exact_feval=[True]*n_f)
        model_c = multi_outputGP(output_dim = n_c,  noise_var=[1e-21]*n_c, exact_feval=[True]*n_c)


        # --- Acquisition optimizer
        #optimizer for inner acquisition function
        acq_opt = GPyOpt.optimization.AcquisitionOptimizer(optimizer='lbfgs', space=space, model=model_f, model_c=model_c)
        #
        # # --- Initial design
        #initial design
        initial_design = GPyOpt.experiment_design.initial_design('latin', space, 10)


        acquisition = KG(model=model_f, model_c=model_c , space=space, optimizer = acq_opt)
        evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
        bo = BO(model_f, model_c, space, f, c, acquisition, evaluator, initial_design, penalty_tag="fixed", penalty_value=penalty)


        max_iter  = 35
        # print("Finished Initialization")
        X, Y, C, Opportunity_cost = bo.run_optimization(max_iter = max_iter,verbosity=False)
        print("Code Ended")

        data = {}
        data["Opportunity_cost"] = np.array(Opportunity_cost).reshape(-1)

        gen_file = pd.DataFrame.from_dict(data)
        folder = "RESULTS"
        subfolder = "new_brannin_proposed_" +str(penalty)
        cwd = os.getcwd()
        print("cwd", cwd)
        path = cwd + "/" + folder +"/"+ subfolder +'/it_' + str(rep)+ '.csv'
        if not os.path.isdir(cwd + "/" + folder +"/"+ subfolder):
            os.makedirs(cwd + "/" + folder +"/"+ subfolder)

        gen_file.to_csv(path_or_buf=path)

        print("X",X,"Y",Y, "C", C)
def function_caller_mistery_TS(rep):
    np.random.seed(rep)
    for noise in [1e-6, 1.0]:
        # func2 = dropwave()

        mistery_f = mistery(sd=np.sqrt(noise))

        # --- Attributes
        #repeat same objective function to solve a 1 objective problem
        f = MultiObjective([mistery_f.f])
        c = MultiObjective([mistery_f.c])

        # --- Attributes
        #repeat same objective function to solve a 1 objective problem

        #c2 = MultiObjective([test_c2])
        # --- Space
        #define space of variables
        space = GPyOpt.Design_space(
            space=[{
                'name': 'var_1',
                'type': 'continuous',
                'domain': (0, 5)
            }, {
                'name': 'var_2',
                'type': 'continuous',
                'domain': (0, 5)
            }]
        )  #GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (0,100)}])#
        n_f = 1
        n_c = 1
        model_f = multi_outputGP(output_dim=n_f,
                                 noise_var=[noise] * n_f,
                                 exact_feval=[True] * n_f)
        model_c = multi_outputGP(output_dim=n_c,
                                 noise_var=[1e-6] * n_c,
                                 exact_feval=[True] * n_c)

        # --- Acquisition optimizer
        #optimizer for inner acquisition function
        acq_opt = GPyOpt.optimization.AcquisitionOptimizer(optimizer='lbfgs',
                                                           space=space,
                                                           model=model_f,
                                                           model_c=model_c)
        #
        # # --- Initial design
        #initial design
        initial_design = GPyOpt.experiment_design.initial_design(
            'latin', space, 10)

        nz = 1
        acquisition = TS(model=model_f,
                         model_c=model_c,
                         nz=nz,
                         space=space,
                         optimizer=acq_opt)
        evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
        bo = BO(model_f,
                model_c,
                space,
                f,
                c,
                acquisition,
                evaluator,
                initial_design,
                expensive=False,
                deterministic=False)

        max_iter = 45
        # print("Finished Initialization")
        X, Y, C, Opportunity_cost = bo.run_optimization(max_iter=max_iter,
                                                        verbosity=False)

        print("Code Ended")

        C_bool = np.prod(np.concatenate(C, axis=1) < 0, axis=1)
        data = {}
        # print("C", C)
        # print("np.array(Opportunity_cost).reshape(-1)", np.array(Opportunity_cost).reshape(-1))
        # print("np.array(Y).reshape(-1)", np.array(Y).reshape(-1))
        # print("np.array(C_bool).reshape(-1)", np.array(C_bool).reshape(-1))
        data["X1"] = np.array(X[:, 0]).reshape(-1)
        data["X2"] = np.array(X[:, 1]).reshape(-1)
        data["Opportunity_cost"] = np.concatenate(
            (np.zeros(10), np.array(Opportunity_cost).reshape(-1)))
        data["Y"] = np.array(Y).reshape(-1)
        data["C_bool"] = np.array(C_bool).reshape(-1)
        gen_file = pd.DataFrame.from_dict(data)
        folder = "RESULTS"
        subfolder = "Mistery_TS" + str(noise)
        cwd = os.getcwd()

        path = cwd + "/" + folder + "/" + subfolder + '/it_' + str(
            rep) + '.csv'
        print("path", path)
        if not os.path.isdir(cwd + "/" + folder + "/" + subfolder):
            os.makedirs(cwd + "/" + folder + "/" + subfolder)

        gen_file.to_csv(path_or_buf=path)

        print("X", X, "Y", Y, "C", C)
def SRN_function_caller_test(rep):

    penalty = 0
    noise = 1e-4
    alpha = 1.95
    np.random.seed(rep)
    folder = "RESULTS"
    subfolder = "SRN_KG"
    cwd = os.getcwd()
    path = cwd + "/" + folder + "/" + subfolder

    # func2 = dropwave()
    SRN_func = SRN(sd=np.sqrt(noise))
    ref_point = SRN_func.ref_point

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem
    f = MultiObjective([SRN_func.f1, SRN_func.f2])
    c = MultiObjective([SRN_func.c1, SRN_func.c2])

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem

    #c2 = MultiObjective([test_c2])
    # --- Space
    #define space of variables
    space = GPyOpt.Design_space(
        space=[{
            'name': 'var_1',
            'type': 'continuous',
            'domain': (-20, 20)
        }, {
            'name': 'var_2',
            'type': 'continuous',
            'domain': (-20, 20)
        }]
    )  #GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (0,100)}])#
    n_f = 2
    n_c = 2
    input_d = 2
    m = n_f

    model_f = multi_outputGP(output_dim=n_f,
                             noise_var=[noise] * n_f,
                             exact_feval=[True] * n_f)
    model_c = multi_outputGP(output_dim=n_c,
                             noise_var=[1e-21] * n_c,
                             exact_feval=[True] * n_c)

    # --- Acquisition optimizer
    # optimizer for inner acquisition function
    acq_opt = GPyOpt.optimization.AcquisitionOptimizer(
        optimizer='lbfgs',
        space=space,
        model=model_f,
        model_c=model_c,
        NSGA_based=False,
        analytical_gradient_prediction=True)

    # --- Initial design
    # initial design
    initial_design = GPyOpt.experiment_design.initial_design(
        'latin', space, 40)  # 2*(input_d+1))

    # --- Utility function
    def prior_sample_generator(n_samples=1, seed=None):
        if seed is None:
            samples = np.random.dirichlet(np.ones((m, )), n_samples)
        else:
            random_state = np.random.RandomState(seed)
            samples = random_state.dirichlet(np.ones((m, )), n_samples)
        return samples

    def prior_density(x):
        assert x.shape[1] == m, "wrong dimension"
        output = np.zeros(x.shape[0])
        for i in range(len(output)):
            output[i] = dirichlet.pdf(x=x[i], alpha=np.ones((m, )))
        return output.reshape(-1)

    def U_func(parameter, y):
        return np.dot(parameter, y)

    def dU_func(parameter, y):
        return parameter

    ##### Utility
    n_samples = 5
    support = prior_sample_generator(
        n_samples=n_samples
    )  # generates the support to marginalise the parameters for acquisition function inside the optimisation process. E_theta[EI(x)]

    prob_dist = prior_density(
        support)  # generates the initial density given the support
    prob_dist /= np.sum(prob_dist)
    parameter_distribution = ParameterDistribution(
        continuous=True, sample_generator=prior_sample_generator)

    U = Utility(func=U_func,
                dfunc=dU_func,
                parameter_dist=parameter_distribution,
                linear=True)

    # acquisition = HVI(model=model_f, model_c=model_c , alpha=alpha, space=space, optimizer = acq_opt)
    acquisition = AcquisitionUKG(model=model_f,
                                 model_c=model_c,
                                 alpha=alpha,
                                 space=space,
                                 optimizer=acq_opt,
                                 utility=U,
                                 true_func=f)
    last_step_acquisition = Last_Step(model_f=model_f,
                                      model_c=model_c,
                                      true_f=f,
                                      true_c=c,
                                      n_f=n_f,
                                      n_c=n_c,
                                      acquisition_optimiser=acq_opt,
                                      seed=rep,
                                      path=path)

    evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
    # GPyOpt.core.evaluators.Sequential(last_step_acquisition)
    bo = BO(model_f,
            model_c,
            space,
            f,
            c,
            acquisition,
            evaluator,
            initial_design,
            ref_point=ref_point)

    max_iter = 25
    # print("Finished Initialization")
    X, Y, C, Opportunity_cost = bo.run_optimization(
        max_iter=max_iter,
        rep=rep,
        last_step_evaluator=last_step_acquisition,
        path=path,
        verbosity=True)
    print("Code Ended")

    print("X", X, "Y", Y, "C", C)
Example #8
    return hX


parameter = np.atleast_1d([1.25, 1.50, 1.25, 1.50])


def g(y):
    return np.squeeze(np.sum(-np.exp(y), axis=0))


def f(X):
    return g(h(X))
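
# --- Side note (not in the original): g collapses the m auxiliary outputs of
# h(X) (an array of shape (m, n)) into one objective value per point by summing
# -exp(.) over the outputs. Tiny numeric check with a hypothetical 2 x 2 array:
import numpy as np
_hX = np.array([[0.0, 1.0],
                [0.5, -0.5]])
print(np.squeeze(np.sum(-np.exp(_hX), axis=0)))  # -> [-2.6487... -3.3248...]
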


# --- Objective
objective = MultiObjective([f], as_list=True, output_dim=1)

# --- Space
space = GPyOpt.Design_space(space=[{
    'name': 'var',
    'type': 'continuous',
    'domain': (0, 1),
    'dimensionality': d
}])

# --- Model
model = multi_outputGP(output_dim=1, exact_feval=[True], fixed_hyps=False)

# --- Initial design
initial_design = GPyOpt.experiment_design.initial_design(
    'random', space, 2 * (d + 1))
Example #9

def hartman(X):
    X = np.atleast_2d(X)
    fX = np.zeros((X.shape[0], 1))
    for i in range(X.shape[0]):
        for j in range(4):
            aux = 0
            for k in range(X.shape[1]):
                aux -= A[j, k] * (X[i, k] - P[j, k])**2
            fX[i, 0] += alpha[j] * np.exp(aux)
    return fX


# --- Objective
objective = MultiObjective([hartman], as_list=True, output_dim=1)

# --- Space
space = GPyOpt.Design_space(space=[{
    'name': 'var',
    'type': 'continuous',
    'domain': (0, 1),
    'dimensionality': 6
}])

# --- Model
model = multi_outputGP(output_dim=1, exact_feval=[True], fixed_hyps=False)

# --- Initial design
initial_design = GPyOpt.experiment_design.initial_design('random', space, 14)
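
# --- Side note (not in the original): initial_design above is a plain numpy
# array of candidate inputs; a quick check should show 14 points lying inside
# the 6-dimensional unit cube.
print(initial_design.shape)                        # expected: (14, 6)
print(initial_design.min(), initial_design.max())  # expected to lie within [0, 1]
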
import numpy as np
import GPyOpt
import GPy
from multi_objective import MultiObjective
from multi_outputGP import multi_outputGP
from maKG import maKG
from maEI import maEI
from parameter_distribution import ParameterDistribution
from utility import Utility
import ma_bo

# --- Attributes
f1 = GPyOpt.objective_examples.experiments1d.forrester().f
f2 = lambda x: -10 * x**2 - np.sin(x)
noise_var = [1., 1.]
f = MultiObjective([f1, f2], noise_var=noise_var)

# --- Space
space = GPyOpt.Design_space(space=[{
    'name': 'var_1',
    'type': 'continuous',
    'domain': (0, 1)
}])

# --- Model (Multi-output GP)
n_a = 2
model = multi_outputGP(output_dim=n_a, noise_var=noise_var)

# --- Acquisition optimizer
acq_opt_maKG = GPyOpt.optimization.AcquisitionOptimizer(optimizer='lbfgs',
                                                        space=space)
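
# Quick sanity check of the two attribute functions (not part of the original
# snippet): evaluate them on a few points of the unit interval.
_x = np.linspace(0, 1, 5)[:, None]
print(f1(_x).ravel())   # Forrester function values
print(f2(_x).ravel())   # -10 x^2 - sin(x)
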
def function_caller_1DGP(rep):
    np.random.seed(rep)

    class GP_test():
        """
    A toy function GP

    ARGS
     min: scalar defining min range of inputs
     max: scalar defining max range of inputs
     seed: int, RNG seed
     x_dim: designs dimension
     a_dim: input dimensions
     xa: n*d matrix, points in space to eval testfun
     NoiseSD: additive gaussaint noise SD

    RETURNS
     output: vector of length nrow(xa)
     """

        def __init__(self, xamin, xamax, seed=11, x_dim=1):
            self.seed = seed
            self.dx = x_dim
            self.da = 0
            self.dxa = x_dim
            self.xmin = np.array([xamin for i in range(self.dxa)])
            self.xmax = np.array([xamax for i in range(self.dxa)])
            vr = 4.
            ls = 10
            self.HP =  [vr,ls]
            self.KERNEL = GPy.kern.RBF(input_dim=self.dxa, variance=vr, lengthscale=([ls] * self.dxa), ARD=True)
            self.generate_function()

        def __call__(self, xa, noise_std=1e-2):
            assert len(xa.shape) == 2, "xa must be an N*d matrix, each row a d point"
            assert xa.shape[1] == self.dxa, "Test_func: wrong input dimension"

            xa = self.check_input(xa)

            ks = self.KERNEL.K(xa, self.XF)
            out = np.dot(ks, self.invCZ)

            E = np.random.normal(0, noise_std, xa.shape[0])

            return (out.reshape(-1, 1) + E.reshape(-1, 1))

        def generate_function(self):
            print("Generating test function")
            np.random.seed(self.seed)

            self.XF = np.random.uniform(size=(50, self.dxa)) * (self.xmax - self.xmin) + self.xmin


            mu = np.zeros(self.XF.shape[0])

            C = self.KERNEL.K(self.XF, self.XF)

            Z = np.random.multivariate_normal(mu, C).reshape(-1, 1)
            invC = np.linalg.inv(C + np.eye(C.shape[0]) * 1e-3)

            self.invCZ = np.dot(invC, Z)

        def check_input(self, x):
            if not x.shape[1] == self.dxa or (x > self.xmax).any() or (x < self.xmin).any():
                raise ValueError("x is wrong dim or out of bounds")
            return x


    # func2 = dropwave()
    GP_test_f = GP_test(xamin= 0, xamax=100, seed=1)
    GP_test_c = GP_test(xamin= 0, xamax=100, seed=2)

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem
    f = MultiObjective([GP_test_f ])
    c = MultiObjective([GP_test_c ])

    # --- Attributes
    #repeat same objective function to solve a 1 objective problem

    #c2 = MultiObjective([test_c2])
    # --- Space
    #define space of variables
    space =  GPyOpt.Design_space(space =[{'name': 'var_1', 'type': 'continuous', 'domain': (0,100)}])#  , {'name': 'var_2', 'type': 'continuous', 'domain': (0,100)}])#
    n_f = 1
    n_c = 1
    model_f = multi_outputGP(output_dim = n_f,   noise_var=[1e-4]*n_f, exact_feval=[True]*n_f)
    model_c = multi_outputGP(output_dim = n_c,  noise_var=[1e-4]*n_c, exact_feval=[True]*n_c)


    # --- Acquisition optimizer
    #optimizer for inner acquisition function
    acq_opt = GPyOpt.optimization.AcquisitionOptimizer(optimizer='lbfgs', inner_optimizer='lbfgs', space=space, model = model_f)
    #
    # # --- Initial design
    #initial design
    initial_design = GPyOpt.experiment_design.initial_design('latin', space, 15)


    for nz in [2,5,10,15]:
        acquisition = KG(model=model_f, model_c=model_c , space=space, optimizer = acq_opt, nz=nz)
        evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
        bo = BO(model_f, model_c, space, f, c, acquisition, evaluator, initial_design)

        max_iter  = 25
        # print("Finished Initialization")
        X, Y, C, Opportunity_cost = bo.run_optimization(max_iter = max_iter,verbosity=False)

        print("Code Ended")

        C_bool = np.prod(np.concatenate(C, axis=1) < 0, axis=1)
        data = {}
        # print("C", C)
        # print("np.array(Opportunity_cost).reshape(-1)", np.array(Opportunity_cost).reshape(-1))
        # print("np.array(Y).reshape(-1)", np.array(Y).reshape(-1))
        # print("np.array(C_bool).reshape(-1)", np.array(C_bool).reshape(-1))
        data["X1"] = np.array(X[:,0]).reshape(-1)
        data["X2"] = np.array(X[:,1]).reshape(-1)
        data["Opportunity_cost"] = np.concatenate((np.zeros(10), np.array(Opportunity_cost).reshape(-1)))
        data["Y"] = np.array(Y).reshape(-1)
        data["C_bool"] = np.array(C_bool).reshape(-1)
        gen_file = pd.DataFrame.from_dict(data)
        folder = "RESULTS"
        subfolder = "Mistery_"+str(nz)
        cwd = os.getcwd()

        path = cwd + "/" + folder +"/"+ subfolder +'/it_' + str(rep)+ '.csv'
        print("path", path)
        if not os.path.isdir(cwd + "/" + folder +"/"+ subfolder):
            os.makedirs(cwd + "/" + folder +"/"+ subfolder)

        gen_file.to_csv(path_or_buf=path)

        print("X",X,"Y",Y, "C", C)