Example #1
    def next(self):
        """Return the next point to evaluate according to acquisition
         function. Returns a subspace index, the index in the given subspace
         and the corresponding learner parameters in a dict which is
         directly unpackable. """
        for i, (_, grid, member) in enumerate(zip(self.maps, self.grids, self.members)):
            # don't do anything fancy until each classifier has been sampled
            # enough: once for single-point grids, twice otherwise
            if len(grid) == 1:
                # single-point grid: placeholder params, just send nothing
                if member.ndata < 1:
                    return 0, i, {}
            else:
                if member.ndata < 2:
                    grid_i = np.random.choice(len(grid))
                    return grid_i, i, self.raw_to_learner_params(i, grid_i)

        # get everybody's posteriors
        posteriors = [m.posterior(g) for m, g in zip(self.members, self.grids)]
        mus = np.concatenate([p[0] for p in posteriors])
        s2s = np.concatenate([p[1] for p in posteriors])

        # pick the parameters maximizing the acquisition function
        acq = ei.expected_improvement(mus, s2s, self.scores)
        # break ties at random when several points share the maximum acquisition
        global_i = util.eq_rand_idx(acq, np.max(acq))
        member_i, grid_i = self.which_member(global_i)

        learner_params = self.raw_to_learner_params(member_i, grid_i)
        return grid_i, member_i, learner_params
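
Both examples lean on two helpers that are not shown here: ei.expected_improvement and util.eq_rand_idx. The sketch below is a minimal reconstruction based only on how they are called, assuming the standard expected-improvement formula for maximization and a uniform random tie-break among equal acquisition values; it is not the library's actual implementation.

import numpy as np
from scipy import stats

def expected_improvement(mu, s2, scores):
    # Assumed: standard EI for maximization, measuring how much each
    # candidate's posterior (mean mu, variance s2) is expected to improve
    # on the best score observed so far.
    best = np.max(scores)
    sigma = np.sqrt(s2)
    with np.errstate(divide='ignore', invalid='ignore'):
        z = (mu - best) / sigma
        acq = (mu - best) * stats.norm.cdf(z) + sigma * stats.norm.pdf(z)
    acq[sigma == 0] = 0.0  # zero variance means no expected improvement
    return acq

def eq_rand_idx(values, target):
    # Assumed: return a uniformly random index among entries equal to
    # `target`, so ties in the acquisition function are broken at random.
    values = np.asarray(values)
    return np.random.choice(np.flatnonzero(values == target))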
Example #2
    def next(self):
        '''Return the next point to evaluate according to the acquisition
        function. Returns the grid index, the learner (subspace) index, and
        the corresponding learner parameters in a dict that can be unpacked
        directly.'''
        if self.gp.ndata < 2:
            # not enough data yet; pick random parameters
            grid_i = np.random.choice(len(self.space))
        else:
            # optimize the model
            optimization.optimize_random_start(self.gp, self.gp_priors)
            # pick the parameters maximizing the acquisition function
            mu, var = self.gp.posterior(self.space)
            acq = ei.expected_improvement(mu, var, self.gp.data[1])
            grid_i = util.eq_rand_idx(acq, np.max(acq))

        learner_i, learner_params = self.raw_to_learner_params(
            self.space[grid_i])
        return grid_i, learner_i, learner_params
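
Either next() method would typically be driven by a loop that evaluates the suggested parameters and feeds the score back into the surrogate model. The sketch below is only illustrative: evaluate_learner and optimizer.record are hypothetical stand-ins, not part of the code above.

import numpy as np

def run_search(optimizer, evaluate_learner, n_iters=50):
    # Hypothetical driver loop: ask for the next point, evaluate it, and
    # report the score back so the optimizer's model improves over time.
    best_score, best = -np.inf, None
    for _ in range(n_iters):
        grid_i, learner_i, learner_params = optimizer.next()
        # the params dict is "directly unpackable", i.e. usable as **kwargs
        score = evaluate_learner(learner_i, **learner_params)
        optimizer.record(grid_i, learner_i, score)  # hypothetical feedback hook
        if score > best_score:
            best_score, best = score, (learner_i, learner_params)
    return best_score, best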