Example #1
File: bbo.py Project: kaghog/octras
    def initialize(self):
        parameter_space = ParameterSpace([
            ContinuousParameter("x%d" % index, bounds[0], bounds[1])
            for index, bounds in enumerate(self.problem.bounds)
        ] + [InformationSourceParameter(len(self.problem.fidelities))])

        # Obtain initial sample
        design = LatinDesign(parameter_space)
        initial_parameters = design.get_samples(self.initial_sample_count)
        initial_response = self._evaluate_batch(initial_parameters)

        # One independent RBF kernel per fidelity, each covering every input
        # dimension; building the list with * would alias one kernel object.
        kernels = [
            GPy.kern.RBF(len(self.problem.bounds))
            for _ in range(len(self.problem.fidelities))
        ]
        kernel = emukit.multi_fidelity.kernels.LinearMultiFidelityKernel(kernels)

        model = GPyLinearMultiFidelityModel(
            initial_parameters, initial_response,
            kernel, n_fidelities = len(self.problem.fidelities)
        )

        model = GPyMultiOutputWrapper(model, len(self.problem.fidelities))
        acquisition = NegativeLowerConfidenceBound(model)

        self.loop = BayesianOptimizationLoop(
            model = model, space = parameter_space,
            acquisition = acquisition, batch_size = self.batch_size
        )
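The method above only assembles the loop. A minimal sketch of a companion method that drives it, assuming a hypothetical iteration_count attribute and that _evaluate_batch accepts inputs whose last column is the fidelity index appended by InformationSourceParameter:

    def run(self):
        # run_loop wraps the callable and, given an int, stops after that
        # many iterations (emukit's FixedIterationsStoppingCondition).
        self.loop.run_loop(self._evaluate_batch, self.iteration_count)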
Example #2
File: bbo.py Project: kaghog/octras
    def initialize(self):
        parameter_space = ParameterSpace([
            ContinuousParameter("x%d" % index, bounds[0], bounds[1])
            for index, bounds in enumerate(self.problem.bounds)
        ])

        # Obtain initial sample
        design = LatinDesign(parameter_space)
        initial_parameters = design.get_samples(self.initial_sample_count)
        initial_response = self._evaluate_batch(initial_parameters)

        kernel = None  # None makes GPRegression fall back to its default RBF kernel
        model = GPy.models.GPRegression(initial_parameters, initial_response, kernel)
        model = GPyModelWrapper(model)

        acquisition = NegativeLowerConfidenceBound(model)

        self.loop = BayesianOptimizationLoop(
            model = model, space = parameter_space,
            acquisition = acquisition, batch_size = self.batch_size
        )
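For reference, the same single-fidelity pattern as a self-contained script; the quadratic objective is an assumption, and the emukit import paths may vary slightly between versions:

import numpy as np
import GPy
from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.initial_designs import LatinDesign
from emukit.model_wrappers import GPyModelWrapper
from emukit.bayesian_optimization.acquisitions import NegativeLowerConfidenceBound
from emukit.bayesian_optimization.loops import BayesianOptimizationLoop

def f(x):
    # Toy objective (assumption): squared norm, minimized at the origin.
    return np.sum(np.square(x), axis=1, keepdims=True)

space = ParameterSpace([
    ContinuousParameter("x0", -2.0, 2.0),
    ContinuousParameter("x1", -2.0, 2.0),
])

x_init = LatinDesign(space).get_samples(5)  # Latin hypercube initial design
y_init = f(x_init)

model = GPyModelWrapper(GPy.models.GPRegression(x_init, y_init))
acquisition = NegativeLowerConfidenceBound(model)

loop = BayesianOptimizationLoop(model=model, space=space, acquisition=acquisition)
loop.run_loop(f, 10)  # ten further evaluations
print(loop.get_results().minimum_location)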
Example #3
def negative_lower_confidence_bound_acquisition(gpy_model):
    return NegativeLowerConfidenceBound(gpy_model)
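A sketch of calling this factory; the toy training data is an assumption:

import numpy as np
import GPy
from emukit.model_wrappers import GPyModelWrapper

rng = np.random.RandomState(0)
wrapped = GPyModelWrapper(GPy.models.GPRegression(rng.rand(10, 2), rng.rand(10, 1)))
acq = negative_lower_confidence_bound_acquisition(wrapped)
scores = acq.evaluate(rng.rand(4, 2))  # shape (4, 1); larger means more promising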
Example #4
if args.model_type == "rf":  # assumed branch; the excerpt begins mid-chain
    model = RandomForest(X_init=X_init, Y_init=Y_init)
    with_gradients = False

elif args.model_type == "dngo":
    model = DNGO(X_init=X_init, Y_init=Y_init)
    with_gradients = False

elif args.model_type == "gp":
    model = BOGP(X_init=X_init, Y_init=Y_init)

if args.acquisition_type == "ei":
    acquisition = ExpectedImprovement(model)
elif args.acquisition_type == "pi":
    acquisition = ProbabilityOfImprovement(model)
elif args.acquisition_type == "nlcb":
    acquisition = NegativeLowerConfidenceBound(model)
elif args.acquisition_type == "logei":
    acquisition = LogExpectedImprovement(model)
elif args.acquisition_type == "entropy_search":
    model = BOGP(X_init=X_init, Y_init=Y_init)
    acquisition = EntropySearch(model, space=space)


# if with_gradients:
#    acquisition_optimizer = AcquisitionOptimizer(space)
# else:
acquisition_optimizer = DirectOptimizer(space)

candidate_point_calculator = Sequential(acquisition, acquisition_optimizer)

bo = BayesianOptimizationLoop(model=model, space=space, X_init=X_init, Y_init=Y_init, acquisition=acquisition,
                              candidate_point_calculator=candidate_point_calculator)  # trailing argument assumed; the original line was truncated here
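Running the configured loop could then look like the following; objective and args.num_iterations are assumptions, not part of the original script:

from emukit.core.loop import FixedIterationsStoppingCondition

# Hypothetical driver: objective is the function being optimized.
bo.run_loop(objective, FixedIterationsStoppingCondition(args.num_iterations))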
Example #5
import numpy as np
from GPy.kern import RBF
from GPy.models import GPRegression
from emukit.bayesian_optimization.acquisitions import NegativeLowerConfidenceBound
from emukit.model_wrappers import GPyModelWrapper

def acquisition():
    rng = np.random.RandomState(42)
    x_init = rng.rand(5, 2)
    y_init = rng.rand(5, 1)
    model = GPRegression(x_init, y_init, RBF(2))
    return NegativeLowerConfidenceBound(GPyModelWrapper(model))
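An assumed smoke test of what the returned acquisition exposes:

import numpy as np

acq = acquisition()
x = np.random.rand(3, 2)
values = acq.evaluate(x)                          # shape (3, 1)
values_g, grads = acq.evaluate_with_gradients(x)  # gradients: shape (3, 2)
assert acq.has_gradients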