Example #1
    def optimize(self,
                 num_vars,
                 objective_function,
                 gradient_function=None,
                 variable_bounds=None,
                 initial_point=None):
        """
        Perform optimization.
        Args:
            num_vars (int) : number of parameters to be optimized.
            objective_function (callable) : handle to a function that
                computes the objective function.
            gradient_function (callable) : handle to a function that
                computes the gradient of the objective function, or
                None if not available.
            variable_bounds (list[(float, float)]) : deprecated
            initial_point (numpy.ndarray[float]) : initial point.
        Returns:
            point, value, nfev
               point: is a 1D numpy.ndarray[float] containing the solution
               value: is a float with the objective function value
               nfev: number of objective function calls made if available or None
        """
        super().optimize(num_vars, objective_function, gradient_function,
                         variable_bounds, initial_point)
        if initial_point is None:
            initial_point = aqua_globals.random.rand(num_vars)
        if gradient_function is None:
            gradient_function = Optimizer.wrap_function(
                Optimizer.gradient_num_diff, (objective_function, self._eps))

        point, value, nfev = self.minimize(objective_function, initial_point,
                                           gradient_function)
        return point, value, nfev
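For reference, a minimal usage sketch of the interface documented above, assuming a hypothetical concrete subclass MyGradientOptimizer that implements minimize; the quadratic objective, its gradient, and the constructor argument are illustrative only:

    import numpy as np

    optimizer = MyGradientOptimizer(maxiter=100)   # hypothetical subclass

    def objective(x):
        # Convex quadratic with its minimum at the origin.
        return float(np.sum(x ** 2))

    def gradient(x):
        return 2 * x

    point, value, nfev = optimizer.optimize(
        num_vars=3,
        objective_function=objective,
        gradient_function=gradient,   # pass None to fall back to gradient_num_diff
        initial_point=np.array([1.0, -0.5, 0.3]),
    )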
Example #2
File: cg.py Project: Travis-S/aqua
    def optimize(self, num_vars, objective_function, gradient_function=None, variable_bounds=None, initial_point=None):
        super().optimize(num_vars, objective_function, gradient_function, variable_bounds, initial_point)

        if gradient_function is None and self._batch_mode:
            epsilon = self._options['eps']
            gradient_function = Optimizer.wrap_function(Optimizer.gradient_num_diff, (objective_function, epsilon))

        res = minimize(objective_function, initial_point, jac=gradient_function, tol=self._tol, method="CG", options=self._options)
        return res.x, res.fun, res.nfev
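The Optimizer.wrap_function(Optimizer.gradient_num_diff, ...) pattern that recurs in these examples substitutes a numerical gradient when no analytic one is supplied. A rough standalone sketch of the idea, using forward differences (not Aqua's actual implementation):

    import numpy as np

    def numerical_gradient(objective_function, x, eps=1e-6):
        # Forward-difference approximation of the gradient at x.
        x = np.asarray(x, dtype=float)
        f0 = objective_function(x)
        grad = np.zeros_like(x)
        for i in range(x.size):
            shifted = x.copy()
            shifted[i] += eps
            grad[i] = (objective_function(shifted) - f0) / eps
        return grad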
Example #3
    def optimize(self, num_vars, objective_function, gradient_function=None, variable_bounds=None, initial_point=None):
        super().optimize(num_vars, objective_function, gradient_function, variable_bounds, initial_point)

        if gradient_function is None and self._max_evals_grouped > 1:
            epsilon = self._options['eps']
            gradient_function = Optimizer.wrap_function(Optimizer.gradient_num_diff, (objective_function, epsilon, self._max_evals_grouped))

        res = minimize(objective_function, initial_point, jac=gradient_function, tol=self._tol,
                       bounds=variable_bounds, method="TNC", options=self._options)
        # Note: nfev here seems to be iterations not function evaluations
        return res.x, res.fun, res.nfev
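Given the note above that TNC's res.nfev may report iterations rather than objective evaluations, one way to count the actual calls is to wrap the objective in a counter. A small illustrative sketch, not part of the original code:

    from scipy.optimize import minimize

    def count_calls(objective_function):
        # Wrap the objective so every call increments a counter.
        def wrapped(x):
            wrapped.ncalls += 1
            return objective_function(x)
        wrapped.ncalls = 0
        return wrapped

    counted = count_calls(lambda x: sum(xi ** 2 for xi in x))
    res = minimize(counted, [1.0, -2.0], method="TNC")
    print(res.nfev, counted.ncalls)   # the two counts may differ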
Example #4
    def optimize(self, num_vars, objective_function, gradient_function=None, variable_bounds=None, initial_point=None):
        super().optimize(num_vars, objective_function, gradient_function, variable_bounds, initial_point)

        if gradient_function is None and self._max_evals_grouped > 1:
            epsilon = self._options['epsilon']
            gradient_function = Optimizer.wrap_function(Optimizer.gradient_num_diff, (objective_function, epsilon, self._max_evals_grouped))

        approx_grad = gradient_function is None
        sol, opt, info = sciopt.fmin_l_bfgs_b(objective_function, initial_point, bounds=variable_bounds,
                                              fprime=gradient_function, approx_grad=approx_grad, **self._options)

        return sol, opt, info['funcalls']
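For context, scipy.optimize.fmin_l_bfgs_b returns the solution, the objective value, and an info dictionary whose 'funcalls' entry is what this example reports as the third element. A standalone toy call, assuming a simple quadratic objective and no bounds:

    import numpy as np
    import scipy.optimize as sciopt

    def objective(x):
        return float(np.sum((x - 1.0) ** 2))

    sol, opt, info = sciopt.fmin_l_bfgs_b(objective, np.zeros(2), approx_grad=True)
    print(sol, opt, info['funcalls'])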
Example #5
    def optimize(
        self,
        num_vars: int,
        objective_function: Callable[[np.ndarray], float],
        gradient_function: Optional[Callable[[np.ndarray], float]] = None,
        variable_bounds: Optional[List[Tuple[float, float]]] = None,
        initial_point: Optional[np.ndarray] = None
    ) -> Tuple[np.ndarray, float, int, list, list]:

        super().optimize(num_vars, objective_function, gradient_function,
                         variable_bounds, initial_point)
        if initial_point is None:
            initial_point = aqua_globals.random.random(num_vars)
        if gradient_function is None:
            gradient_function = Optimizer.wrap_function(
                Optimizer.gradient_num_diff, (objective_function, self._eps))

        point, value, nfev = self.minimize(objective_function, initial_point,
                                           gradient_function)
        return point, value, nfev, self.loss_list, self.params
Example #6
    def optimize(self,
                 num_vars,
                 objective_function,
                 gradient_function=None,
                 variable_bounds=None,
                 initial_point=None):
        super().optimize(num_vars, objective_function, gradient_function,
                         variable_bounds, initial_point)

        if gradient_function is None and self._max_evals_grouped > 1:
            epsilon = self._options['eps']
            gradient_function = Optimizer.wrap_function(
                Optimizer.gradient_num_diff,
                (objective_function, epsilon, self._max_evals_grouped))

        # Record the parameter vector at each SLSQP iteration.
        intermediate_parameters = []

        def callbackF(Xi):
            intermediate_parameters.append(Xi)

        res = minimize(objective_function,
                       initial_point,
                       jac=gradient_function,
                       tol=self._tol,
                       bounds=variable_bounds,
                       method="SLSQP",
                       options=self._options,
                       callback=callbackF)

        # Flatten the recorded parameters and write them out, one value per line.
        intermediate_parameters = np.ravel(np.array(intermediate_parameters))
        with open('mis_fichero.txt', 'w') as fichero_escribir:
            for value in intermediate_parameters:
                fichero_escribir.write('%.10f\n' % value)

        return res.x, res.fun, res.nfev
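The explicit write loop in this example can also be expressed with numpy.savetxt, which writes one formatted value per line; this is a sketch of an equivalent alternative, keeping the file name from the example:

    # Equivalent to the write loop: one value per line, 10 decimal places.
    np.savetxt('mis_fichero.txt', intermediate_parameters, fmt='%.10f')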
Example #7
 def optimizer(self, optimizer: Optimizer):
     """ Sets optimizer """
     super(VQE, self.__class__).optimizer.__set__(self,
                                                  optimizer)  # type: ignore
     if optimizer is not None:
         optimizer.set_max_evals_grouped(self._max_evals_grouped)
Example #8
 def optimizer(self, optimizer: Optimizer):
     """ Sets optimizer """
     # Plain assignment through super() does not invoke the parent property
     # setter; call the descriptor's __set__ explicitly, as in Example #7
     # (this assumes the setter is likewise defined on VQE).
     super(VQE, self.__class__).optimizer.__set__(self, optimizer)  # type: ignore
     if optimizer is not None:
         optimizer.set_max_evals_grouped(self._max_evals_grouped)
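Examples #7 and #8 differ in how they delegate to the parent class's property setter: plain assignment through super() does not invoke a parent property's setter, which is why Example #7 calls the descriptor's __set__ directly. A minimal self-contained illustration of that idiom with toy classes (not Aqua code):

    class Base:
        @property
        def optimizer(self):
            return self._optimizer

        @optimizer.setter
        def optimizer(self, value):
            self._optimizer = value

    class Derived(Base):
        @Base.optimizer.setter
        def optimizer(self, value):
            # "super().optimizer = value" would raise AttributeError here;
            # invoke the parent property's __set__ explicitly instead.
            super(Derived, type(self)).optimizer.__set__(self, value)
            self._extra_setup_done = True

    d = Derived()
    d.optimizer = "COBYLA"
    print(d.optimizer, d._extra_setup_done)   # COBYLA True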