Example 1
    def __init__(self, space, outer_optimizer='lbfgs', inner_optimizer='lbfgs', **kwargs):

        self.space = space.decision_context_space
        self.inner_space = space.decision_space
        self.optimizer_name = outer_optimizer
        self.inner_optimizer_name = inner_optimizer
        self.kwargs = kwargs
        
        ## -- Baseline points
        self.baseline_points = None

        ## -- Save extra options that can be passed to the optimizer
        if 'model' in self.kwargs:
            self.model = self.kwargs['model']

        if 'type_anchor_points_logic' in self.kwargs:
            self.type_anchor_points_logic = self.kwargs['type_anchor_points_logic']
        else:
            self.type_anchor_points_logic = max_objective_anchor_points_logic

        ## -- Context managers for the outer (decision-context) space and the inner (decision) space
        self.context_manager = ContextManager(self.space)
        self.inner_context_manager = ContextManager(self.inner_space)
        ## -- Set optimizer and inner optimizer (WARNING: this won't update context)
        self.optimizer = choose_optimizer(self.optimizer_name, self.context_manager.noncontext_bounds)
        self.inner_optimizer = choose_optimizer(self.inner_optimizer_name, self.inner_context_manager.noncontext_bounds)
        self.verbose = True
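
For orientation, a construction sketch follows. The class name FlexibleAcquisitionOptimizer is inferred from debug messages elsewhere in these examples; space and gp_model are hypothetical stand-ins, where space must expose decision_space and decision_context_space attributes and model is forwarded through **kwargs.

# Hedged usage sketch: FlexibleAcquisitionOptimizer, space and gp_model are
# assumed names, not confirmed by this snippet alone.
optimizer = FlexibleAcquisitionOptimizer(space,
                                         outer_optimizer='lbfgs',
                                         inner_optimizer='lbfgs',
                                         model=gp_model)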
Example 2
 def __init__(self,
              model,
              optimization_space,
              optimizer,
              scenario_distribution,
              utility,
              expectation_utility,
              cost_withGradients=None):
     self.optimizer_name = optimizer
     self.scenario_distribution = scenario_distribution
     self.utility = utility
     self.expectation_utility = expectation_utility
     super(TS, self).__init__(model,
                              optimization_space,
                              cost_withGradients=cost_withGradients)
     if cost_withGradients is None:
         self.cost_withGradients = constant_cost_withGradients
     else:
         self.cost_withGradients = cost_withGradients
     #
     self.decision_space = optimization_space.decision_space
     self.decision_space_context_manager = ContextManager(
         self.decision_space)
     self.decision_space_optimizer = choose_optimizer(
         self.optimizer_name,
         self.decision_space_context_manager.noncontext_bounds)
     #
     self.decision_context_space = optimization_space.decision_context_space
     self.decision_context_space_context_manager = ContextManager(
         self.decision_context_space)
     self.decision_context_space_optimizer = choose_optimizer(
         self.optimizer_name,
         self.decision_context_space_context_manager.noncontext_bounds)
     #
     self.full_scenario_support = True  # NOTE: full support over the scenario distribution is currently assumed
     if self.full_scenario_support:
         self.scenario_support = scenario_distribution.support
         self.scenario_prob_dist = scenario_distribution.prob_dist
         self.scenario_support_cardinality = len(self.scenario_support)
     self.utility_support = utility.parameter_distribution.support
     self.utility_prob_dist = utility.parameter_distribution.prob_dist
     self.full_utility_support = self.utility.parameter_distribution.use_full_support
     if self.full_utility_support:
         self.utility_support_cardinality = len(self.utility_support)
     self.number_of_gp_hyps_samples = min(
         10, self.model.number_of_hyps_samples())
     self.X_aux = None
     self.Y_aux = None
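
The attributes above cache finite supports and probability weights for both the scenario and the utility-parameter distributions. A minimal sketch of the expectation they enable, assuming a hypothetical per-scenario value function g(x, s):

def expected_value_over_scenarios(g, x, scenario_support, scenario_prob_dist):
    # Finite-support expectation: E[g(x, S)] = sum_i p_i * g(x, s_i),
    # which is what full_scenario_support makes possible.
    return sum(p * g(x, s) for s, p in zip(scenario_support, scenario_prob_dist))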
Example 3
    def optimize(self, f=None, df=None, f_df=None, duplicate_manager=None):
        """
        Optimizes the input function.
        :param f: function to optimize.
        :param df: gradient of the function to optimize.
        :param f_df: returns both the function to optimize and its gradient.
        """
        self.f = f
        self.df = df
        self.f_df = f_df

        ## --- Update the optimizer, in case context has been passed.
        self.optimizer = choose_optimizer(
            self.optimizer_name, self.context_manager.noncontext_bounds)
        # print("In FlexibleAcquisitionOptimizer.optimize:")
        # print("     self.context_manager.nocontext_index    : ", self.context_manager.noncontext_index)
        # print("     self.context_manager.nocontext_bounds   : ", self.context_manager.noncontext_bounds)
        # print("     self.context_manager.nocontext_index_obj: ", self.context_manager.nocontext_index_obj)

        ## --- Selecting the anchor points and removing duplicates
        if self.type_anchor_points_logic == max_objective_anchor_points_logic:
            anchor_points_generator = ObjectiveAnchorPointsGenerator(
                self.space, random_design_type, f)
        elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic:
            anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(
                self.space, sobol_design_type, self.model)
        else:
            raise ValueError('Unknown anchor points logic: {}'.format(self.type_anchor_points_logic))

        ## -- Select the anchor points (with context)
        # print("In FlexibleAcquisitionOptimizer.optimize:")
        # print("     self.context_manager.nocontext_index    : ", self.context_manager.noncontext_index)
        # print("     self.context_manager.nocontext_bounds   : ", self.context_manager.noncontext_bounds)
        # print("     self.context_manager.nocontext_index_obj: ", self.context_manager.nocontext_index_obj)
        anchor_points = anchor_points_generator.get(
            duplicate_manager=duplicate_manager,
            context_manager=self.context_manager)

        ## --- Applying local optimizers at the anchor points and update bounds of the optimizer (according to the context)
        optimized_points = [
            apply_optimizer(self.optimizer,
                            a,
                            f=f,
                            df=None,
                            f_df=f_df,
                            duplicate_manager=duplicate_manager,
                            context_manager=self.context_manager,
                            space=self.space) for a in anchor_points
        ]
        x_min, fx_min = min(optimized_points, key=lambda t: t[1])

        return x_min, fx_min
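
A usage sketch for the method above, assuming a GPyOpt-style acquisition object acq (the callback names follow the GPyOpt acquisition interface and are assumptions here); the optimizer minimizes f:

# acq.acquisition_function returns the acquisition value to minimize,
# acq.acquisition_function_withGradients returns the value and its gradient.
x_min, fx_min = optimizer.optimize(f=acq.acquisition_function,
                                   f_df=acq.acquisition_function_withGradients)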
Example 4
    def optimize(self, f=None, df=None, f_df=None, duplicate_manager=None, n_starts=80, n_anchor=8):
        """
        Optimizes the input function.

        :param f: function to optimize.
        :param df: gradient of the function to optimize.
        :param f_df: returns both the function to optimize and its gradient.

        """
        self.f = f
        self.df = df
        self.f_df = f_df

        ## --- Update the optimizer, in case context has been passed.
        self.optimizer = choose_optimizer(self.optimizer_name, self.context_manager.noncontext_bounds)

        ## --- Selecting the anchor points and removing duplicates
        if self.type_anchor_points_logic == max_objective_anchor_points_logic:
            anchor_points_generator = ObjectiveAnchorPointsGenerator(self.space, random_design_type, f, n_starts)
        elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic:
            anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(self.space, sobol_design_type, self.model)
        else:
            raise ValueError('Unknown anchor points logic: {}'.format(self.type_anchor_points_logic))

        ## -- Select the anchor points (with context)
        anchor_points, anchor_points_values = anchor_points_generator.get(num_anchor=n_anchor, duplicate_manager=duplicate_manager,
                                                                          context_manager=self.context_manager,
                                                                          get_scores=True)
        if self.baseline_points is not None:
            fX_baseline = f(self.baseline_points)[:, 0]
            anchor_points = np.vstack((anchor_points, np.copy(self.baseline_points)))
            anchor_points_values = np.concatenate((anchor_points_values, fX_baseline))
        
        if self.verbose:
            print('anchor points')
            print(anchor_points)
            print(anchor_points_values)
        parallel = True
        if parallel:
            # Use a context manager so the worker pool is closed after mapping
            with Pool(4) as pool:
                optimized_points = pool.map(self._parallel_optimization_wrapper, anchor_points)
        else:
            optimized_points = [apply_optimizer(self.optimizer, a, f=f, df=None, f_df=f_df, duplicate_manager=duplicate_manager, context_manager=self.context_manager, space=self.space, verbose=self.verbose) for a in anchor_points]
        x_min, fx_min = min(optimized_points, key=lambda t: t[1])
        # np.asscalar is deprecated; .item() extracts the scalar value instead
        if anchor_points_values[0].item() < fx_min.item():
            if self.verbose:
                print('anchor_point was best found')
            fx_min = np.atleast_2d(anchor_points_values[0])
            x_min = np.atleast_2d(anchor_points[0])
        return x_min, fx_min
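
Example 4 maps self._parallel_optimization_wrapper over the anchor points, but the wrapper itself is not part of the snippet. A minimal sketch, assuming it forwards a single anchor point to apply_optimizer with the callbacks stashed on the instance (which also explains why optimize saves self.f, self.df and self.f_df first: a bound method sent to a multiprocessing Pool can only carry state through the pickled instance):

    def _parallel_optimization_wrapper(self, x0):
        # Hypothetical wrapper: run the local optimizer from one anchor point,
        # reusing the callbacks stored by optimize() before the Pool started.
        return apply_optimizer(self.optimizer, x0, f=self.f, df=None, f_df=self.f_df,
                               context_manager=self.context_manager, space=self.space,
                               verbose=self.verbose)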
Example 5
    def optimize_inner_func(self, f=None, df=None, f_df=None, duplicate_manager=None, parallel=False, n_starts=80, n_anchor=8):
        """
        Optimizes the input function.

        :param f: function to optimize.
        :param df: gradient of the function to optimize.
        :param f_df: returns both the function to optimize and its gradient.

        """
        self.f = f
        self.df = df
        self.f_df = f_df

        ## --- Update the optimizer, in case context has been passed.
        self.inner_optimizer = choose_optimizer(self.inner_optimizer_name, self.inner_context_manager.noncontext_bounds)

        ## --- Selecting the anchor points and removing duplicates
        if self.type_anchor_points_logic == max_objective_anchor_points_logic:
            anchor_points_generator = ObjectiveAnchorPointsGenerator(self.inner_space, random_design_type, f, n_starts)
        elif self.type_anchor_points_logic == thompson_sampling_anchor_points_logic:
            anchor_points_generator = ThompsonSamplingAnchorPointsGenerator(self.inner_space, sobol_design_type, self.model)
        else:
            raise ValueError('Unknown anchor points logic: {}'.format(self.type_anchor_points_logic))

        ## -- Select the anchor points (with context)
        anchor_points, anchor_points_values = anchor_points_generator.get(num_anchor=n_anchor, duplicate_manager=duplicate_manager,
                                                                          context_manager=self.context_manager,
                                                                          get_scores=True)
        if parallel:
            # Context manager guarantees the worker pool is cleaned up
            with Pool(4) as pool:
                optimized_points = pool.map(self._parallel_inner_optimization_wrapper, anchor_points)
            if self.verbose:
                print('optimized points')
                print(optimized_points)
        else:
            optimized_points = [apply_optimizer_inner(self.inner_optimizer, a, f=f, df=None, f_df=f_df, duplicate_manager=duplicate_manager, context_manager=self.context_manager) for a in anchor_points]
        x_min, fx_min = min(optimized_points, key=lambda t: t[1])
        # np.asscalar is deprecated; .item() extracts the scalar value instead
        if anchor_points_values[0].item() < fx_min.item():
            if self.verbose:
                print('anchor_point was best found')
            fx_min = np.atleast_2d(anchor_points_values[0])
            x_min = np.atleast_2d(anchor_points[0])
        return x_min, fx_min
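
The inner wrapper is likewise absent from the snippet; by symmetry with Example 4 it presumably forwards to apply_optimizer_inner. A sketch under that assumption:

    def _parallel_inner_optimization_wrapper(self, x0):
        # Hypothetical inner-loop analogue of _parallel_optimization_wrapper.
        return apply_optimizer_inner(self.inner_optimizer, x0, f=self.f, df=None,
                                     f_df=self.f_df, context_manager=self.context_manager)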
    def test_create_cma_optimizer(self):
        optimizer = choose_optimizer('CMA', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)
    def test_create_direct_optimizer(self):
        optimizer = choose_optimizer('DIRECT', self.design_space.get_bounds())

        self.assertIsNotNone(optimizer)
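
The two tests above reference a self.design_space fixture that is not shown. A minimal sketch of the missing setUp, assuming a GPyOpt-style Design_space (the import path and the toy one-dimensional domain are assumptions):

import unittest
from GPyOpt.core.task.space import Design_space

class TestChooseOptimizer(unittest.TestCase):
    def setUp(self):
        # Continuous toy domain; get_bounds() then yields [(0, 1)].
        self.design_space = Design_space([{'name': 'x', 'type': 'continuous', 'domain': (0, 1)}])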