Example #1
0
    def __init__(self,
                 space,
                 model,
                 utility,
                 expectation_utility=None,
                 optimizer='lbfgs',
                 inner_optimizer='lbfgs',
                 parallel=False,
                 n_starting=400,
                 n_anchor=20,
                 include_baseline_points=False,
                 **kwargs):
        """Set up the acquisition optimizer.

        :param space: design space the acquisition is optimized over.
        :param model: surrogate model (must expose ``number_of_hyps_samples``
            and ``output_dim``).
        :param utility: utility object; its ``parameter_distribution`` is read
            for full-support/sampling behavior.
        :param expectation_utility: optional expectation of the utility.
        :param optimizer: name of the outer optimizer (default ``'lbfgs'``).
        :param inner_optimizer: name of the inner optimizer (default ``'lbfgs'``).
        :param parallel: whether to optimize anchor points in parallel.
        :param n_starting: number of starting points for anchor generation.
        :param n_anchor: number of anchor points to keep.
        :param include_baseline_points: whether to add baseline points to the
            anchor set during optimization.
        :param kwargs: extra options; recognized keys are ``'model'`` and
            ``'anchor_points_logic'``.
        """
        self.space = space
        self.model = model
        self.utility = utility
        self.expectation_utility = expectation_utility
        self.optimizer_name = optimizer
        self.inner_optimizer_name = inner_optimizer
        self.parallel = parallel
        self.n_starting = n_starting
        self.n_anchor = n_anchor
        self.include_baseline_points = include_baseline_points
        self.number_of_utility_parameter_samples = 3
        self.full_parameter_support = self.utility.parameter_distribution.use_full_support
        # Cap the number of GP hyperparameter samples used downstream.
        self.number_of_gp_hyps_samples = min(
            10, self.model.number_of_hyps_samples())
        self.n_attributes = self.model.output_dim
        self.kwargs = kwargs

        ## -- save extra options that can be passed to the optimizer
        if 'model' in self.kwargs:
            self.model = self.kwargs['model']

        # BUGFIX: the original read self.kwargs['type_anchor_points_logic']
        # after testing for the key 'anchor_points_logic', which raised a
        # KeyError whenever the option was actually supplied.
        if 'anchor_points_logic' in self.kwargs:
            self.type_anchor_points_logic = self.kwargs['anchor_points_logic']
        else:
            self.type_anchor_points_logic = max_objective_anchor_points_logic

        ## -- Context handler: takes
        self.context_manager = ContextManager(space)
        ## -- Set optimizer and inner optimizer (WARNING: this won't update context)
        self.optimizer = choose_optimizer(
            self.optimizer_name, self.context_manager.noncontext_bounds)
        self.inner_optimizer = choose_optimizer(
            self.inner_optimizer_name, self.context_manager.noncontext_bounds)
Example #2
0
 def __init__(self, model, space, optimizer, utility):
     """Initialize the acquisition optimizer with its model, space and utility."""
     super(uTS, self).__init__(model, space)
     self.utility = utility
     self.optimizer_name = optimizer
     # Context handler over the design space; the outer optimizer is built
     # on the non-context bounds it exposes.
     self.context_manager = ContextManager(self.space)
     self.optimizer = choose_optimizer(self.optimizer_name,
                                       self.context_manager.noncontext_bounds)
     # Auxiliary data containers, populated later.
     self.X_aux = None
     self.Y_aux = None
Example #3
0
    def optimize_inner_func(self,
                            f=None,
                            df=None,
                            f_df=None,
                            duplicate_manager=None,
                            n_starting=400,
                            n_anchor=20):
        """
        Optimizes the input function.

        :param f: function to optimize.
        :param df: gradient of the function to optimize.
        :param f_df: returns both the function to optimize and its gradient.
        :param duplicate_manager: optional manager used to filter duplicate
            anchor points.
        :param n_starting: number of starting points for anchor generation.
        :param n_anchor: number of anchor points to keep.
        :return: tuple ``(x_min, fx_min)`` with the best point found and its value.
        :raises ValueError: if ``self.type_anchor_points_logic`` is not a
            recognized anchor-point logic.
        """
        self.f = f
        self.df = df
        self.f_df = f_df

        # Update the optimizer, in case context has been passed.
        self.inner_optimizer = choose_optimizer(
            self.inner_optimizer_name, self.context_manager.noncontext_bounds)

        # Selecting the anchor points and removing duplicates.
        # BUGFIX: an unknown logic previously left anchor_points_generator
        # unbound, producing a confusing UnboundLocalError below.
        if self.type_anchor_points_logic == max_objective_anchor_points_logic:
            anchor_points_generator = ObjectiveAnchorPointsGenerator(
                self.space, latin_design_type, f, n_starting)
        elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic:
            anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(
                self.space, sobol_design_type, self.model)
        else:
            raise ValueError(
                'Unknown anchor points logic: {}'.format(
                    self.type_anchor_points_logic))

        # Select the anchor points (with context)
        anchor_points, anchor_points_values = anchor_points_generator.get(
            num_anchor=n_anchor,
            duplicate_manager=duplicate_manager,
            context_manager=self.context_manager,
            get_scores=True)

        # Applying local optimizers at the anchor points and update bounds of the optimizer (according to the context)
        # NOTE(review): df is deliberately passed as None here (the inner
        # optimizer relies on f/f_df); confirm before forwarding df.
        optimized_points = [
            apply_optimizer_inner(self.inner_optimizer,
                                  a,
                                  f=f,
                                  df=None,
                                  f_df=f_df,
                                  duplicate_manager=duplicate_manager,
                                  context_manager=self.context_manager,
                                  space=self.space) for a in anchor_points
        ]
        # Keep the locally-optimized point with the smallest value.
        x_min, fx_min = min(optimized_points, key=lambda t: t[1])
        return x_min, fx_min
Example #4
0
    def optimize(self, f=None, df=None, f_df=None, duplicate_manager=None):
        """
        Optimizes the input function.

        :param f: function to optimize.
        :param df: gradient of the function to optimize.
        :param f_df: returns both the function to optimize and its gradient.
        :param duplicate_manager: optional manager used to filter duplicate
            anchor points.
        :return: tuple ``(x_min, fx_min)`` with the best point found and its value.
        :raises ValueError: if ``self.type_anchor_points_logic`` is not a
            recognized anchor-point logic.
        """
        self.f = f
        self.df = df
        self.f_df = f_df

        # Update the optimizer, in case context has been passed.
        self.optimizer = choose_optimizer(
            self.optimizer_name, self.context_manager.noncontext_bounds)

        # Selecting the anchor points and removing duplicates.
        # BUGFIX: an unknown logic previously left anchor_points_generator
        # unbound, producing a confusing UnboundLocalError below.
        if self.type_anchor_points_logic == max_objective_anchor_points_logic:
            anchor_points_generator = ObjectiveAnchorPointsGenerator(
                self.space, random_design_type, f, self.n_starting)
        elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic:
            anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(
                self.space, sobol_design_type, self.model)
        else:
            raise ValueError(
                'Unknown anchor points logic: {}'.format(
                    self.type_anchor_points_logic))

        # Select the anchor points (with context)
        anchor_points, anchor_points_values = anchor_points_generator.get(
            num_anchor=self.n_anchor,
            duplicate_manager=duplicate_manager,
            context_manager=self.context_manager,
            get_scores=True)

        # Baseline points: the marginal argmax of the utility for each sampled
        # utility parameter, appended to the anchor set.
        if self.include_baseline_points:
            X_baseline = []
            if self.full_parameter_support:
                utility_parameter_samples = self.utility.parameter_distribution.support
            else:
                utility_parameter_samples = self.utility.parameter_distribution.sample(
                    self.number_of_utility_parameter_samples)
            for i in range(len(utility_parameter_samples)):
                marginal_argmax = self._current_marginal_argmax(
                    utility_parameter_samples[i])
                X_baseline.append(marginal_argmax[0, :])
            X_baseline = np.atleast_2d(X_baseline)
            fX_baseline = f(X_baseline)[:, 0]
            anchor_points = np.vstack((anchor_points, X_baseline))
            anchor_points_values = np.concatenate(
                (anchor_points_values, fX_baseline))
        print('Anchor points:')
        print(anchor_points)
        print('Anchor points values:')
        print(anchor_points_values)

        if self.parallel:
            # BUGFIX: the worker pool was never closed/joined, leaking
            # processes on every call; the context manager terminates it.
            with Pool(4) as pool:
                optimized_points = pool.map(
                    self._parallel_optimization_wrapper, anchor_points)
            print('optimized points (parallel):')

        else:
            optimized_points = [
                apply_optimizer(self.optimizer,
                                a,
                                f=f,
                                df=None,
                                f_df=f_df,
                                duplicate_manager=duplicate_manager,
                                context_manager=self.context_manager,
                                space=self.space) for a in anchor_points
            ]
            print('Optimized points (sequential):')
        print(optimized_points)
        # Best locally-optimized point.
        x_min, fx_min = min(optimized_points, key=lambda t: t[1])
        fx_min = np.squeeze(fx_min)

        # Prefer a baseline point when it beats (or ties, with probability
        # 0.5) the best optimized point.
        if self.include_baseline_points:
            fx_min_baseline = fX_baseline.min()
            if fx_min_baseline < fx_min:
                print('Baseline point was best found.')
                optimal_indices = np.atleast_1d(np.argmin(fX_baseline))
                index = random.choice(optimal_indices)
                x_min = np.atleast_2d(X_baseline[index, :])
                fx_min = fX_baseline[index]
            elif fx_min_baseline == fx_min:
                print('Baseline point is a good as best optimized point.')
                use_baseline_point = np.random.binomial(1, 0.5, 1)
                if use_baseline_point:
                    print('Baseline point will be used.')
                    optimal_indices = np.atleast_1d(np.argmin(fX_baseline))
                    index = random.choice(optimal_indices)
                    x_min = np.atleast_2d(X_baseline[index, :])
                    fx_min = fX_baseline[index]

        print('Acquisition value of selected point: {}'.format(fx_min))
        return x_min, fx_min