Example #1
    def _get_likelihood_matrix(self, train_data_good, train_data_bad,
                               kdes_good, kdes_bad,
                               kde_configspaces):  # datapoints x models
        likelihood_matrix = np.empty(
            (train_data_good.shape[0] + train_data_bad.shape[0],
             len(kdes_good)))
        for i, (good_kde, bad_kde, kde_configspace) in enumerate(
                zip(kdes_good, kdes_bad, kde_configspaces)):
            train_data_good_compatible = train_data_good
            train_data_bad_compatible = train_data_bad

            # compute likelihood of kde given observation
            pdf = KDEMultivariate.pdf  # leave_given_out_pdf
            if not self.warmstarted_model.is_current_kde(i):
                imputer = BOHB(kde_configspace).impute_conditional_data
                train_data_good_compatible = make_vector_compatible(
                    train_data_good, self.configspace, kde_configspace,
                    imputer)
                train_data_bad_compatible = make_vector_compatible(
                    train_data_bad, self.configspace, kde_configspace, imputer)
                pdf = KDEMultivariate.pdf

            good_kde_likelihoods = np.nan_to_num(
                pdf(good_kde, train_data_good_compatible))
            bad_kde_likelihoods = np.nan_to_num(
                pdf(bad_kde, train_data_bad_compatible))
            likelihood_matrix[:, i] = np.append(good_kde_likelihoods,
                                                bad_kde_likelihoods)
        return likelihood_matrix
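
The unbound call KDEMultivariate.pdf(kde, data) used above is just another way of writing kde.pdf(data). A minimal standalone sketch with statsmodels, where the array shapes and var_type are illustrative assumptions rather than values taken from the example:

import numpy as np
from statsmodels.nonparametric.kernel_density import KDEMultivariate

# Fit a KDE on 2-D continuous data and evaluate its density at query points.
train = np.random.rand(50, 2)
kde = KDEMultivariate(data=train, var_type='cc', bw='normal_reference')

query = np.random.rand(10, 2)
dens_bound = kde.pdf(query)                     # usual bound-method call
dens_unbound = KDEMultivariate.pdf(kde, query)  # unbound form used in the example
assert np.allclose(dens_bound, dens_unbound)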
Example #2
    def __init__(self, config):
        super(BOHBProposer, self).__init__(config)
        self.tid = 0
        # bohb handles losses (minimization) only, so flip the sign for max targets
        self.target = 1 if config['target'] == min else -1

        # BOHB config parameters
        set_default_keyvalue("n_iterations", 4, config)
        for k, v in BOHB_DEFAULT.items():
            set_default_keyvalue(k, v, config)
        if not config['min_points_in_model']:
            config['min_points_in_model'] = None
        # hyperband related parameters
        for k, v in SH_DEFAULT.items():
            set_default_keyvalue(k, v, config)
        # Hyperband related settings - modified from hpbandster/optimizers/bohb.py
        self.eta = config['eta']
        self.min_budget = config['min_budget']
        self.max_budget = config['max_budget']
        self.max_SH_iter = -int(
            np.log(self.min_budget / self.max_budget) / np.log(self.eta)) + 1
        self.budgets = self.max_budget * np.power(
            self.eta, -np.linspace(self.max_SH_iter - 1, 0, self.max_SH_iter))
        self.n_iterations = config['n_iterations']

        self.nSamples = self._get_nSample()
        bohb_config = {k: config[k] for k in BOHB_DEFAULT}
        configspace = self.create_configspace(config['parameter_config'])

        self.config_generator = BOHB(configspace, **bohb_config)

        ## Based on master.py
        self.iterations = []
        self.running_jobs = {}
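
The max_SH_iter and budgets lines above implement Hyperband's geometric budget schedule; a quick numeric sketch, with eta, min_budget and max_budget chosen purely for illustration:

import numpy as np

eta, min_budget, max_budget = 3, 1, 81           # illustrative values
max_SH_iter = -int(np.log(min_budget / max_budget) / np.log(eta)) + 1  # 5 budget levels
budgets = max_budget * np.power(
    eta, -np.linspace(max_SH_iter - 1, 0, max_SH_iter))
print(budgets)  # [ 1.  3.  9. 27. 81.]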
Example #3
    def setup_bohb(self):
        from hpbandster.optimizers.config_generators.bohb import BOHB

        if self._mode == "max":
            self._metric_op = -1.
        elif self._mode == "min":
            self._metric_op = 1.

        bohb_config = self._bohb_config or {}
        self.bohber = BOHB(self._space, **bohb_config)
Example #4
    def __init__(self,
                 configspace=None,
                 eta=3,
                 min_budget=0.01,
                 max_budget=1,
                 min_points_in_model=None,
                 top_n_percent=15,
                 num_samples=64,
                 random_fraction=1 / 3,
                 bandwidth_factor=3,
                 min_bandwidth=1e-3,
                 **kwargs):
        # TODO: Proper check for ConfigSpace object!
        if configspace is None:
            raise ValueError("You have to provide a valid ConfigSpace object")

        cg = BOHB(configspace=configspace,
                  min_points_in_model=min_points_in_model,
                  top_n_percent=top_n_percent,
                  num_samples=num_samples,
                  random_fraction=random_fraction,
                  bandwidth_factor=bandwidth_factor,
                  min_bandwidth=min_bandwidth)

        super().__init__(config_generator=cg, **kwargs)

        # Hyperband related stuff
        self.eta = eta
        self.min_budget = min_budget
        self.max_budget = max_budget

        # precompute some HB stuff
        self.max_SH_iter = -int(
            np.log(min_budget / max_budget) / np.log(eta)) + 1
        self.budgets = max_budget * np.power(
            eta, -np.linspace(self.max_SH_iter - 1, 0, self.max_SH_iter))

        self.config.update({
            'eta': eta,
            'min_budget': min_budget,
            'max_budget': max_budget,
            'budgets': self.budgets,
            'max_SH_iter': self.max_SH_iter,
            'min_points_in_model': min_points_in_model,
            'top_n_percent': top_n_percent,
            'num_samples': num_samples,
            'random_fraction': random_fraction,
            'bandwidth_factor': bandwidth_factor,
            'min_bandwidth': min_bandwidth
        })
Example #5
    def _setup_bohb(self):
        from hpbandster.optimizers.config_generators.bohb import BOHB

        if self._metric is None and self._mode:
            # If only a mode was passed, use anonymous metric
            self._metric = DEFAULT_METRIC

        if self._mode == "max":
            self._metric_op = -1.
        elif self._mode == "min":
            self._metric_op = 1.

        if self._seed is not None:
            self._space.seed(self._seed)

        bohb_config = self._bohb_config or {}
        self.bohber = BOHB(self._space, **bohb_config)
Example #6
    def test_imputation_conditional_spaces(self):

        bohb = BOHB(self.configspace, random_fraction=0)

        raw_array = []

        for i in range(128):

            config = self.configspace.sample_configuration()
            raw_array.append(config.get_array())
            imputed_array = bohb.impute_conditional_data(np.array(raw_array))
            self.assertFalse(np.any(np.isnan(imputed_array)))
            job = Job(i, budget=1, config=config)
            job.result = {'loss': np.random.rand(), 'info': {}}
            bohb.new_result(job)

        for j in range(64):
            conf, info = bohb.get_config(1)
            self.assertTrue(info['model_based_pick'])
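
The test above exercises the core propose/observe cycle of the config generator. A standalone sketch of that loop on a toy one-dimensional space; the search space, budget value and quadratic loss are assumptions made for illustration:

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
from hpbandster.core.dispatcher import Job
from hpbandster.optimizers.config_generators.bohb import BOHB

cs = CS.ConfigurationSpace()
cs.add_hyperparameter(CSH.UniformFloatHyperparameter('x', lower=-5, upper=5))

cg = BOHB(cs, min_points_in_model=5, random_fraction=0)

for i in range(32):
    config, info = cg.get_config(budget=1)        # propose a configuration
    job = Job(i, budget=1, config=config)
    job.result = {'loss': (config['x'] - 1.0) ** 2, 'info': {}}
    cg.new_result(job)                            # report the observation back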
Example #7
    def __init__(self,
                 space,
                 bohb_config=None,
                 max_concurrent=10,
                 metric="neg_mean_loss",
                 mode="max"):
        from hpbandster.optimizers.config_generators.bohb import BOHB
        assert BOHB is not None, "HpBandSter must be installed!"
        assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"
        self._max_concurrent = max_concurrent
        self.trial_to_params = {}
        self.running = set()
        self.paused = set()
        self._metric = metric
        if mode == "max":
            self._metric_op = -1.
        elif mode == "min":
            self._metric_op = 1.
        bohb_config = bohb_config or {}
        self.bohber = BOHB(space, **bohb_config)
        super(TuneBOHB, self).__init__(metric=self._metric, mode=mode)
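
self._metric_op above presumably gets multiplied into the reported metric before it is handed to the underlying BOHB, which always minimizes a loss. A minimal illustration of that sign flip; the metric value here is made up:

metric_op = -1.0                      # mode == "max"
reported_metric = 0.92                # e.g. a trial's validation accuracy
loss_for_bohb = metric_op * reported_metric
print(loss_for_bohb)                  # -0.92: higher accuracy -> lower loss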