Example #1
0
 def _resolve_acquisition_method(self, acquisition_method):
     """Return a usable acquisition method instance.

     A ready-made ``AcquisitionBase`` instance is passed through as-is;
     ``None`` selects a default LCBSC acquisition built around the
     surrogate model. Anything else is rejected.

     Parameters
     ----------
     acquisition_method : AcquisitionBase or None

     Returns
     -------
     AcquisitionBase

     Raises
     ------
     TypeError
         If ``acquisition_method`` is neither ``None`` nor an
         ``AcquisitionBase`` instance.
     """
     if isinstance(acquisition_method, AcquisitionBase):
         return acquisition_method
     if acquisition_method is None:
         # Default acquisition: LCB-SC over the surrogate, prior-aware.
         return LCBSC(model=self.target_model,
                      prior=self.prior,
                      noise_var=self.acq_noise_var,
                      exploration_rate=self.exploration_rate,
                      seed=self.seed,
                      include_prior=True)
     raise TypeError(
         'acquisition_method must be an instance of AcquisitionBase.')
Example #2
0
    def _acquisition(self, gp):
        """Build the acquisition strategy for the surrogate model *gp*.

        Parameters
        ----------
        gp
            Surrogate (GP) model handed to the acquisition object.

        Returns
        -------
        An acquisition object matching ``self.params.sampling_type``:
        "uniform" -> UniformAcquisition, "bo" -> GPLCA-wrapped LCBSC.

        Raises
        ------
        ValueError
            If ``self.params.sampling_type`` is not recognised.
        """
        if self.params.sampling_type == "uniform":
            return UniformAcquisition(model=gp)
        if self.params.sampling_type == "bo":
            # LCBSC criterion wrapped in GPLCA; acq_* params tune the
            # optimizer, L scales the GPLCA transformation.
            return GPLCA(LCBSC(delta=self.params.acq_delta,
                               max_opt_iters=self.params.acq_opt_iterations,
                               noise_var=self.params.acq_noise_cov,
                               model=gp),
                         L=self.params.L)
        logger.critical("Unknown sampling type '{}', aborting!".format(
            self.params.sampling_type))
        # Was `assert False`: asserts are stripped under `python -O`,
        # which would make this fall through and silently return None.
        raise ValueError("Unknown sampling type '{}'".format(
            self.params.sampling_type))
Example #3
0
class BayesianOptimization(ParameterInference):
    """Bayesian Optimization of an unknown target function."""

    def __init__(self,
                 model,
                 target_name=None,
                 bounds=None,
                 initial_evidence=None,
                 update_interval=10,
                 target_model=None,
                 acquisition_method=None,
                 acq_noise_var=0,
                 exploration_rate=10,
                 batch_size=1,
                 batches_per_acquisition=None,
                 async_acq=False,
                 **kwargs):
        """Initialize Bayesian optimization.

        Parameters
        ----------
        model : ElfiModel or NodeReference
        target_name : str or NodeReference
            Only needed if model is an ElfiModel
        bounds : dict, optional
            The region where to estimate the posterior for each parameter in
            model.parameters: dict('parameter_name':(lower, upper), ... )`. Not used if
            custom target_model is given.
        initial_evidence : int, dict, optional
            Number of initial evidence or a precomputed batch dict containing parameter
            and discrepancy values. Default value depends on the dimensionality.
        update_interval : int, optional
            How often to update the GP hyperparameters of the target_model
        target_model : GPyRegression, optional
        acquisition_method : Acquisition, optional
            Method of acquiring evidence points. Defaults to LCBSC.
        acq_noise_var : float or np.array, optional
            Variance(s) of the noise added in the default LCBSC acquisition method.
            If an array, should be 1d specifying the variance for each dimension.
        exploration_rate : float, optional
            Exploration rate of the acquisition method
        batch_size : int, optional
            Elfi batch size. Defaults to 1.
        batches_per_acquisition : int, optional
            How many batches will be requested from the acquisition function at one go.
            Defaults to max_parallel_batches.
        async_acq : bool, optional
            Allow acquisitions to be made asynchronously, i.e. do not wait for all the
            results from the previous acquisition before making the next. This can be more
            efficient with a large amount of workers (e.g. in cluster environments) but
            forgoes the guarantee for the exactly same result with the same initial
            conditions (e.g. the seed). Default False.
        **kwargs

        """
        # NOTE(review): the parameter was previously named `async`, which is a
        # reserved keyword since Python 3.7 and makes the file a SyntaxError on
        # modern interpreters; any other use of the old `self.async` attribute
        # elsewhere in the file must be renamed to `self.async_acq` as well.
        model, target_name = self._resolve_model(model, target_name)
        output_names = [target_name] + model.parameter_names
        super(BayesianOptimization, self).__init__(
            model, output_names, batch_size=batch_size, **kwargs)

        # Fall back to a fresh GP surrogate over the model parameters.
        target_model = target_model or GPyRegression(self.model.parameter_names, bounds=bounds)

        self.target_name = target_name
        self.target_model = target_model

        n_precomputed = 0
        n_initial, precomputed = self._resolve_initial_evidence(initial_evidence)
        if precomputed is not None:
            # Seed the surrogate with the precomputed evidence batch.
            params = batch_to_arr2d(precomputed, self.parameter_names)
            n_precomputed = len(params)
            self.target_model.update(params, precomputed[target_name])

        self.batches_per_acquisition = batches_per_acquisition or self.max_parallel_batches
        # Default acquisition: prior-aware LCBSC over the surrogate.
        self.acquisition_method = acquisition_method or LCBSC(self.target_model,
                                                              prior=ModelPrior(self.model),
                                                              noise_var=acq_noise_var,
                                                              exploration_rate=exploration_rate,
                                                              seed=self.seed)

        self.n_initial_evidence = n_initial
        self.n_precomputed_evidence = n_precomputed
        self.update_interval = update_interval
        self.async_acq = async_acq
Example #4
0
     # NOTE(review): this fragment begins inside an enclosing block whose
     # header is outside this view (the deeper indent here suggests a
     # conditional branch selecting a deep-GP surrogate) — confirm against
     # the full file before relying on this reading.
     # Build a human-readable surrogate label, e.g. "LV-3*GP".
     surrogate = 'LV-' if LVlayer else ''
     surrogate += str(GPlayers + 1) + '*GP'
     # Deep-GP surrogate; Ms/IW_samples/pred_samples/opt_steps look like
     # fixed training settings (their meaning is defined by DGPRegression).
     target_model = DGPRegression(parameter_names=par_names,
                                  bounds=bounds,
                                  GPlayers=GPlayers,
                                  LVlayer=LVlayer,
                                  Ms=50,
                                  IW_samples=5,
                                  pred_samples=20,
                                  opt_steps=20000,
                                  q=q)
     # Append surrogate type and q to the method label used for reporting.
     meth += '-' + surrogate
     meth += '-' + str(q)
 meth += '(' + str(evidence) + ')'
 # Lower-confidence-bound acquisition criterion over the surrogate.
 acq = LCBSC(target_model,
             noise_var=noise_var,
             exploration_rate=10,
             seed=seed)
 # BOLFI driver; 'dist' is presumably the discrepancy node name — verify
 # against the elfi_model definition.
 bolfi = elfi.BOLFI(elfi_model,
                    'dist',
                    batch_size=5,
                    initial_evidence=init_ev,
                    update_interval=init_ev,
                    target_model=target_model,
                    acquisition_method=acq,
                    seed=seed)
 # conduct inference
 post, mses = bolfi.fit(n_evidence=evidence, bar=False)
 res = bolfi.extract_result()
 # Draw weighted samples from the fitted posterior approximation.
 samples, weights = get_weighted_samples(post, N=100000)
 samples_df = None
 # posterior_samples = sample_posterior(samples, weights, cols=par_names, N=10000)