Example #1
    def run(self, M):
        """ Performs the optimization.

        Args:
            M: number of measurements per iteration

        Returns:
            (best_params, cost_history)
                where

            best_params: optimized values for ["θ_y", "θ_x"] (as a list)
            cost_history: cost recorded at each iteration (as a list)
        """

        # Clear costs log
        self.cost_history = []

        # Use SPSA as our classical optimizer
        optimizer = SPSA()

        # Define cost function
        def cost_(params):
            return self.cost(params, M)

        # Randomize initial point
        initial_point = [np.random.rand() * np.pi for _ in range(2)]

        # Perform optimization
        best_params, _, _ = optimizer.optimize(
            num_vars=2,
            objective_function=cost_,
            variable_bounds=[(0, 2 * np.pi)] * 2,
            initial_point=initial_point)

        return best_params, self.cost_history
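For context, the optimize call used in this example can be reduced to a self-contained sketch. It assumes the legacy qiskit.aqua.components.optimizers import (spelled out in Example #4 below); the quadratic toy objective and all values are purely illustrative:

import numpy as np
from qiskit.aqua.components.optimizers import SPSA

def objective(params):
    # Toy cost with its minimum at (pi, pi/2)
    return (params[0] - np.pi) ** 2 + (params[1] - np.pi / 2) ** 2

optimizer = SPSA(max_trials=100)
best_params, best_value, nfev = optimizer.optimize(
    num_vars=2,
    objective_function=objective,
    variable_bounds=[(0, 2 * np.pi)] * 2,
    initial_point=np.random.rand(2) * np.pi,
)
print(best_params, best_value)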
Example #2
    def optimize(self, maxiter=500):
        # Build the initial point from values produced by self.qri
        thetas = np.array(self.qri.generate(self.num_qubits)) / 32

        # Run SPSA on the instance's objective function
        spsa = SPSA(maxiter=maxiter)
        minima, _, _ = spsa.optimize(self.num_qubits,
                                     self._objective_fn,
                                     initial_point=thetas)

        self.minima = list(minima)
Example #3
    def minimize(self, cost_function, initial_params=None):
        """
        Minimizes the given cost function using optimizers from Qiskit Aqua.

        Args:
            cost_function (Callable): function that takes a numpy.ndarray of
                parameters and returns the cost as a float
            initial_params (np.ndarray): initial parameters for the optimization

        Returns:
            optimization_result (scipy.optimize.OptimizeResult): results of the optimization.
        """
        history = []

        if self.method == "SPSA":
            optimizer = SPSA(**self.options)
        elif self.method in ("ADAM", "AMSGRAD"):
            if self.method == "AMSGRAD":
                self.options["amsgrad"] = True
            optimizer = ADAM(**self.options)
        else:
            raise ValueError(f"Unsupported optimization method: {self.method}")

        number_of_variables = len(initial_params)

        if self.keep_value_history:
            cost_function_wrapper = recorder(cost_function)
        else:
            cost_function_wrapper = _CostFunctionWrapper(cost_function)

        gradient_function = None
        if hasattr(cost_function, "gradient") and callable(
                getattr(cost_function, "gradient")):
            gradient_function = cost_function.gradient

        solution, value, nit = optimizer.optimize(
            num_vars=number_of_variables,
            objective_function=cost_function_wrapper,
            initial_point=initial_params,
            gradient_function=gradient_function,
        )

        if self.keep_value_history:
            nfev = len(cost_function_wrapper.history)
            history = cost_function_wrapper.history
        else:
            nfev = cost_function_wrapper.number_of_calls
            history = []

        return optimization_result(
            opt_value=value,
            opt_params=solution,
            nit=nit,
            history=history,
            nfev=nfev,
        )
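Note that the gradient hook above only fires when the cost function object exposes a callable gradient attribute. A minimal sketch of a cost object with that shape (the class and names here are illustrative, not from the original repository):

import numpy as np

class QuadraticCost:
    """Illustrative cost carrying an analytic gradient."""

    def __call__(self, params):
        return float(np.sum(params ** 2))

    def gradient(self, params):
        # Analytic gradient of sum(params**2)
        return 2 * params

cost = QuadraticCost()
# minimize() detects cost.gradient via hasattr/callable and forwards it
# to the optimizer as gradient_function.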
Example #4
class QKSPSA:
    def __init__(
            self,
            max_iter=200,  # Minimizer iterations.
            n_g=1,  # Averaging number
    ):
        from qiskit.aqua.components.optimizers import SPSA
        self.optimizer = SPSA(max_trials=max_iter, last_avg=n_g)

    def set_loss_function(self, loss_function):
        self.L = loss_function

    def __call__(self, theta):
        # optimize() returns (point, value, nfev); return only the point
        params = self.optimizer.optimize(len(theta),
                                         self.L,
                                         initial_point=theta)
        return params[0]
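A minimal usage sketch for the wrapper above; the loss function and dimensions are placeholders, not from the original code:

import numpy as np

def loss(theta):
    # Toy loss with its minimum at theta = 0.5 in every coordinate
    return float(np.sum((np.asarray(theta) - 0.5) ** 2))

opt = QKSPSA(max_iter=50, n_g=1)
opt.set_loss_function(loss)
best_theta = opt(np.zeros(4))  # returns the optimized parameter vector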
Example #5
def LET_optimizer(self, backend, LET_circuits_dict, random):
    '''Finds the set of parameters that optimizes the LET circuit'''

    params_size = len(LET_circuits_dict[1].parameters) # number of parameters to optimize

    variable_bounds = []
    for var in range(self.vff_options['n_diagonal_gates']):
        variable_bounds.append((0, 6 * self.vff_options['training_time']))
    for var in range(self.vff_options['n_diagonal_gates'], params_size):
        variable_bounds.append((0, 2 * np.pi))
    callback_list = []

    # SPSA ships with Qiskit (rather than scipy), so it is handled separately
    if self.vff_options['min_method'] == 'SPSA':

        spsa_optimizer = SPSA(maxiter=self.vff_options['maxiter'])
        # setting the non-variable parameters
        wrapped_cost_function = spsa_optimizer.wrap_function(function = cost_function_LET, 
                                                             args = (self, backend, LET_circuits_dict, callback_list)
                                                            )

        opt_cost = 1.
        opt_params = np.zeros(params_size)
        repetition = 0
        while opt_cost > self.vff_options['target_fidelity'] and repetition < self.vff_options['max_reps']:

            seed = np.zeros(params_size)
            seed[: self.vff_options['n_diagonal_gates']] = 6 * self.vff_options['training_time'] * np.random.random(self.vff_options['n_diagonal_gates'])
            seed[self.vff_options['n_diagonal_gates'] :] = 2 * np.pi * np.random.random(params_size - self.vff_options['n_diagonal_gates'])

            new_opt_params, new_opt_cost, new_nfev = spsa_optimizer.optimize(
                objective_function=wrapped_cost_function,
                num_vars=params_size,
                initial_point=seed,
                variable_bounds=variable_bounds
            )

            if new_opt_cost < opt_cost:
                opt_cost = new_opt_cost
                opt_params = new_opt_params

            repetition += 1
            
        self.callback_vff = callback_list

    else:

        if random:        
            seed = np.zeros(params_size)
            seed[: self.vff_options['n_diagonal_gates']] = 6 * self.vff_options['training_time'] * np.random.random(self.vff_options['n_diagonal_gates'])
            seed[self.vff_options['n_diagonal_gates'] :] = 2 * np.pi * np.random.random(params_size - self.vff_options['n_diagonal_gates'])
        else:
            seed = self.optimal_VFF_params

        # scipy.optimize's minimization method
        out = minimize(cost_function_LET,
                       x0      = seed,
                       args    = (self, backend, LET_circuits_dict, callback_list),
                       method  = self.vff_options['min_method'],
                       tol     = self.vff_options['tolerance'],
                       options = {'maxiter': self.vff_options['maxiter'],
                                  'gtol': self.vff_options['tolerance'],
                                  'ftol': self.vff_options['tolerance'],
                                  'disp': True}
                      )

        opt_params = out['x']
        opt_cost   = out['fun']

        self.callback_vff = callback_list

    print('opt_cost', opt_cost)

    return opt_params, opt_cost
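For reference, the option keys that LET_optimizer reads from self.vff_options, gathered into one illustrative dictionary (the values are placeholders, not defaults from the original code):

vff_options = {
    'n_diagonal_gates': 3,     # parameters bounded by (0, 6 * training_time)
    'training_time': 1.0,
    'min_method': 'SPSA',      # 'SPSA' or a scipy.optimize.minimize method name
    'maxiter': 200,
    'target_fidelity': 0.01,   # SPSA branch restarts until opt_cost <= this
    'max_reps': 5,             # cap on SPSA restarts
    'tolerance': 1e-6,         # forwarded as tol/gtol/ftol to scipy
}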