Example #1
# Assumed imports for this snippet (not shown in the original);
# simulator and op are pytest fixtures/parameters supplied by the test suite.
import numpy
import numpy as np
from tequila import Variable, Objective, ExpectationValue, gates, paulis, simulate

def test_heterogeneous_operations_r(simulator, op, value1=(numpy.random.randint(1, 999) / 1000.0 * (numpy.pi / 2.0)),
                                    value2=(numpy.random.randint(1, 999) / 1000.0 * (numpy.pi / 2.0))):
    angle1 = Variable(name="angle1")
    angle2 = Variable(name="angle2")
    variables = {angle1: value1, angle2: value2}
    qubit = 0
    control = 1
    H1 = paulis.Y(qubit=qubit)
    U1 = gates.X(target=control) + gates.Rx(target=qubit, control=control, angle=angle1)
    e1 = ExpectationValue(U=U1, H=H1)
    added = Objective(args=[e1.args[0], angle2], transformation=op)
    val = simulate(added, variables=variables, backend=simulator)
    en1 = simulate(e1, variables=variables, backend=simulator)
    an1 = -np.sin(angle1(variables=variables))
    an2 = angle2(variables=variables)
    assert np.isclose(val, float(op(en1, an2)), atol=1.e-4)
    assert np.isclose(en1, an1, atol=1.e-4)
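
The pattern above, passing an expectation value's raw argument together with a plain Variable into Objective(args=[...], transformation=op), applies op to the evaluated arguments. A minimal standalone sketch of the same idea, assuming numpy.add as the transformation (the names here are illustrative, not from the original test):

# Sketch: combine an expectation value and a variable via a transformation.
import numpy as np
from tequila import Variable, Objective, ExpectationValue, gates, paulis, simulate

a = Variable("a")
U = gates.Ry(target=0, angle=a)
E = ExpectationValue(U=U, H=paulis.Z(0))
combined = Objective(args=[E.args[0], a], transformation=np.add)
# evaluates to <Z> + a = cos(a) + a for the Ry rotation above
print(simulate(combined, variables={a: 0.5}))
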
Example #2
# Assumes the same imports as Example #1.
def test_heterogeneous_operations_l(simulator, op, value1=(numpy.random.randint(1, 1000) / 1000.0 * (numpy.pi / 2.0)),
                                    value2=(numpy.random.randint(1, 1000) / 1000.0 * (numpy.pi / 2.0))):
    angle1 = Variable(name="angle1")
    angle2 = Variable(name="angle2")
    variables = {angle1: value1, angle2: value2}
    qubit = 0
    control = 1
    H2 = paulis.X(qubit=qubit)
    U2 = gates.X(target=control) + gates.Ry(target=qubit, control=control, angle=angle2)
    e2 = ExpectationValue(U=U2, H=H2)
    added = Objective(args=[angle1, e2.args[0]], transformation=op)
    val = simulate(added, variables=variables, backend=simulator)
    en2 = simulate(e2, variables=variables, backend=simulator)
    an1 = angle1(variables=variables)
    an2 = np.sin(angle2(variables=variables))
    assert np.isclose(val, float(op(an1, en2)), atol=1.e-4)
    assert np.isclose(en2, an2, atol=1.e-4)
Example #3
# Assumes the imports from Example #1, plus: from tequila import grad
def test_heterogeneous_gradient_r_div(simulator):
    # float power is not tested here because it keeps coming up NaN when the argument is too small
    angle1 = Variable(name="angle1")
    value = (numpy.random.randint(100, 1000) / 1000.0 * (numpy.pi / 2.0))
    variables = {angle1: value}
    qubit = 0
    control = 1
    H1 = paulis.Y(qubit=qubit)
    U1 = gates.X(target=control) + gates.Rx(target=qubit, control=control, angle=angle1)
    e1 = ExpectationValue(U=U1, H=H1)
    added = Objective(args=[e1.args[0], angle1], transformation=np.true_divide)
    val = simulate(added, variables=variables, backend=simulator)
    en1 = simulate(e1, variables=variables, backend=simulator)
    an1 = -np.sin(angle1(variables=variables))
    anval = angle1(variables=variables)
    dO = grad(added, 'angle1')
    dE = grad(e1, 'angle1')
    deval = simulate(dE, variables=variables, backend=simulator)
    doval = simulate(dO, variables=variables, backend=simulator)
    dtrue = deval / anval - en1 / (anval ** 2)
    assert np.isclose(float(val), float(np.true_divide(en1, anval)))
    assert np.isclose(en1, an1, atol=1.e-4)
    assert np.isclose(doval, dtrue, atol=1.e-4)
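
The reference value dtrue above is just the quotient rule applied to E(t)/t: d/dt [E(t)/t] = E'(t)/t - E(t)/t^2. A quick finite-difference check of that identity with a stand-in function (plain numpy, not the circuit from the test):

# Sketch: verify d/dt [E(t)/t] = E'(t)/t - E(t)/t^2 numerically.
import numpy as np

E = np.cos                    # stand-in for the expectation value
dE = lambda t: -np.sin(t)     # its analytical derivative
t, h = 0.7, 1e-6
fd = (E(t + h) / (t + h) - E(t - h) / (t - h)) / (2 * h)
analytic = dE(t) / t - E(t) / t ** 2
assert np.isclose(fd, analytic, atol=1e-6)
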
Example #4
    def __call__(self, objective: Objective,
                 initial_values: typing.Dict[Variable, numbers.Real],
                 variables: typing.List[Variable],
                 gradient: typing.Dict[Variable, Objective] = None,
                 qng: bool = False,
                 hessian: typing.Dict[typing.Tuple[Variable, Variable], Objective] = None,
                 samples: int = None,
                 backend: str = None,
                 backend_options: dict = None,
                 noise: NoiseModel = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> SciPyReturnType:
        """
        Optimizes with scipy and gives back the optimized angles.
        The optimized energies over the history can be retrieved afterwards.
        :param objective: The tequila Objective to minimize
        :param initial_values: initial values for the objective
        :param return_scipy_output: choose if the full scipy output shall be returned
        :param reset_history: reset the history before optimization starts (has no effect if self.save_history is False)
        :return: tuple of optimized energy, optimized angles and scipy output
        """

        infostring = "Starting {method} optimization\n".format(method=self.method)
        infostring += "Objective: {} expectationvalues\n".format(objective.count_expectationvalues())

        if self.save_history and reset_history:
            self.reset_history()

        active_angles = {}
        for v in variables:
            active_angles[v] = initial_values[v]

        passive_angles = {}
        for k, v in initial_values.items():
            if k not in active_angles.keys():
                passive_angles[k] = v

        # Transform the initial value dictionary into (ordered) arrays
        param_keys, param_values = zip(*active_angles.items())
        param_values = numpy.array(param_values)

        bounds = None
        if self.method_bounds is not None:
            bounds = {k: None for k in active_angles}
            for k, v in self.method_bounds.items():
                if k in bounds:
                    bounds[k] = v
            infostring += "bounds : {}\n".format(self.method_bounds)
            names, bounds = zip(*bounds.items())
            assert (names == param_keys)  # make sure the bounds are not shuffled

        # do the compilation here to avoid costly recompilation during the optimization
        compiled_objective = compile(objective=objective, variables=initial_values, backend=backend, noise=noise,
                                     samples=samples, *args, **kwargs)

        E = _EvalContainer(objective=compiled_objective,
                           param_keys=param_keys,
                           samples=samples,
                           passive_angles=passive_angles,
                           save_history=self.save_history,
                           backend_options=backend_options,
                           silent=self.silent)

        # compile gradients
        if self.method in self.gradient_based_methods + self.hessian_based_methods and not isinstance(gradient, str):
            compiled_grad_objectives = dict()
            if gradient is None:
                gradient = {assign_variable(k): grad(objective=objective, variable=k) for k in active_angles.keys()}
            else:
                gradient = {assign_variable(k): v for k, v in gradient.items()}

            grad_exval = []
            for k in active_angles.keys():
                if k not in gradient:
                    raise Exception("No gradient for variable {}".format(k))
                grad_exval.append(gradient[k].count_expectationvalues())
                compiled_grad_objectives[k] = compile(objective=gradient[k], variables=initial_values,
                                                      samples=samples, noise=noise, backend=backend, *args, **kwargs)

            if qng:
                combos = get_qng_combos(objective, samples=samples, backend=backend,
                                        noise=noise, initial_values=initial_values)

                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   samples=samples,
                                   passive_angles=passive_angles,
                                   save_history=self.save_history,
                                   silent=self.silent,
                                   backend_options=backend_options)
            else:

                dE = _GradContainer(objective=compiled_grad_objectives,
                                    param_keys=param_keys,
                                    samples=samples,
                                    passive_angles=passive_angles,
                                    save_history=self.save_history,
                                    silent=self.silent,
                                    backend_options=backend_options)

                infostring += "Gradients: {} expectationvalues (min={}, max={})\n".format(sum(grad_exval),
                                                                                          min(grad_exval),
                                                                                          max(grad_exval))
        else:
            # use numerical gradient
            dE = gradient
            infostring += "Gradients: {}\n".format(gradient)

        # compile hessian

        if self.method in self.hessian_based_methods and not isinstance(hessian, str):

            if isinstance(gradient, str):
                raise TequilaScipyException("Can not use numerical gradients for Hessian based methods")
            if qng is True:
                raise TequilaScipyException('Quantum Natural Hessian not yet well-defined, sorry!')
            compiled_hess_objectives = dict()
            hess_exval = []
            for i, k in enumerate(active_angles.keys()):
                for j, l in enumerate(active_angles.keys()):
                    if j > i: continue
                    hess = grad(gradient[k], l)
                    compiled_hess = compile(objective=hess, variables=initial_values, samples=samples,
                                            noise=noise,
                                            backend=backend, *args, **kwargs)
                    compiled_hess_objectives[(k, l)] = compiled_hess
                    compiled_hess_objectives[(l, k)] = compiled_hess
                    hess_exval.append(compiled_hess.count_expectationvalues())

            ddE = _HessContainer(objective=compiled_hess_objectives,
                                 param_keys=param_keys,
                                 samples=samples,
                                 passive_angles=passive_angles,
                                 save_history=self.save_history,
                                 silent=self.silent)

            infostring += "Hessian: {} expectationvalues (min={}, max={})\n".format(sum(hess_exval), min(hess_exval),
                                                                                    max(hess_exval))

        else:
            infostring += "Hessian: {}\n".format(hessian)
            if self.method != "TRUST-CONSTR" and hessian is not None:
                raise TequilaScipyException("numerical hessians only for trust-constr method")
            ddE = hessian

        if not self.silent:
            print("ObjectiveType is {}".format(type(compiled_objective)))
            print(infostring)
            print("backend: {}".format(compiled_objective.backend))
            print("samples: {}".format(samples))
            print("{} active variables".format(len(active_angles)))

        # get the number of real scipy iterations for better histories
        real_iterations = []

        Es = []
        callback = lambda x, *args: real_iterations.append(len(E.history) - 1)
        res = scipy.optimize.minimize(E, x0=param_values, jac=dE, hess=ddE,
                                      args=(Es,),
                                      method=self.method, tol=self.tol,
                                      bounds=bounds,
                                      constraints=self.method_constraints,
                                      options=self.method_options,
                                      callback=callback)

        # failsafe since callback is not implemented everywhere
        if len(real_iterations) == 0:
            real_iterations = range(len(E.history))
        else:
            real_iterations = [0] + real_iterations
        if self.save_history:
            self.history.energies = [E.history[i] for i in real_iterations]
            self.history.energy_evaluations = E.history
            self.history.angles = [E.history_angles[i] for i in real_iterations]
            self.history.angles_evaluations = E.history_angles
            if dE is not None and not isinstance(dE, str):
                # can currently only save gradients if explicitly evaluated
                # and will fail for hessian based approaches
                # need better callback functions
                try:
                    if self.method not in self.hessian_based_methods:
                        self.history.gradients = [dE.history[i] for i in real_iterations]
                except Exception:
                    print("WARNING: History could not assign the stored gradients")
                self.history.gradients_evaluations = dE.history
            if ddE is not None and not isinstance(ddE, str):
                # hessians are not evaluated in the same frequencies as energies
                # therefore we can not store the "real" iterations currently
                self.history.hessians_evaluations = ddE.history

        E_final = res.fun
        angles_final = dict((param_keys[i], res.x[i]) for i in range(len(param_keys)))
        angles_final = {**angles_final, **passive_angles}

        return SciPyReturnType(energy=E_final, angles=format_variable_dictionary(angles_final), history=self.history,
                               scipy_output=res)
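
scipy.optimize.minimize only hands a flat numpy array to the callable it is given, so the role of _EvalContainer is to zip that array back onto param_keys, merge in the passive_angles, and evaluate the compiled objective. A simplified sketch of that wrapper pattern (hypothetical; the real _EvalContainer also handles printing and backend options):

# Sketch of the eval-container pattern (simplified, assumed behavior):
class EvalSketch:
    def __init__(self, objective, param_keys, passive_angles=None, samples=None):
        self.objective = objective
        self.param_keys = param_keys
        self.passive_angles = passive_angles or {}
        self.samples = samples
        self.history = []          # energies, appended on every call
        self.history_angles = []   # variable dictionaries, same order

    def __call__(self, x, *args):
        # rebuild the variable dictionary that scipy flattened away
        variables = dict(zip(self.param_keys, x))
        variables.update(self.passive_angles)
        energy = self.objective(variables=variables, samples=self.samples)
        self.history.append(energy)
        self.history_angles.append(variables)
        return float(energy)
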
Example #5
def minimize(objective: Objective,
             gradient: typing.Union[str, typing.Dict[Variable, Objective]] = None,
             hessian: typing.Union[str, typing.Dict[typing.Tuple[Variable, Variable], Objective]] = None,
             qng: bool = None,
             initial_values: typing.Dict[typing.Hashable, numbers.Real] = None,
             variables: typing.List[typing.Hashable] = None,
             samples: int = None,
             maxiter: int = 100,
             backend: str = None,
             backend_options: dict = None,
             noise: NoiseModel = None,
             method: str = "BFGS",
             tol: float = 1.e-3,
             method_options: dict = None,
             method_bounds: typing.Dict[typing.Hashable, numbers.Real] = None,
             method_constraints=None,
             silent: bool = False,
             save_history: bool = True,
             *args,
             **kwargs) -> SciPyReturnType:
    """

    Parameters
    ----------
    objective: Objective :
        The tequila objective to optimize
    gradient: typing.Union[str, typing.Dict[Variable, Objective], None] : (Default value = None) :
        '2-point', 'cs' or '3-point' for numerical gradient evaluation (does not work in combination with all optimizers),
        dictionary of variables and tequila objective to define own gradient,
        None for automatic construction (default)
    hessian: typing.Union[str, typing.Dict[typing.Tuple[Variable, Variable], Objective], None] : (Default value = None) :
        '2-point', 'cs' or '3-point' for numerical hessian evaluation (does not work in combination with all optimizers),
        dictionary (keys: tuple of variables, values: tequila objective) to define own hessian,
        None for automatic construction (default)
    qng: bool : (Default value = False) :
        whether to use the qng (quantum natural gradient) instead of the standard gradient when a gradient-based method is used.
        NOTE: throws an error for anything but a single expectationvalue with no passive angles.
    initial_values: typing.Dict[typing.Hashable, numbers.Real]: (Default value = None):
        Initial values as dictionary of Hashable types (variable keys) and floating point numbers. If None, they are initialized with random values in [0, 2*pi)
    variables: typing.List[typing.Hashable] :
         (Default value = None)
         List of Variables to optimize
    samples: int :
         (Default value = None)
         samples/shots to take in every run of the quantum circuits (None activates full wavefunction simulation)
    maxiter: int :
         (Default value = 100)
    backend: str :
         (Default value = None)
         Simulator backend, will be automatically chosen if set to None
    backend_options: dict:
         (Default value = None)
         Additional options for the backend
         Will be unpacked and passed to the compiled objective in every call
    noise: NoiseModel:
         (Default value =None)
         a NoiseModel to apply to all expectation values in the objective.
    method: str :
         (Default value = "BFGS")
         Optimization method (see scipy documentation, or 'available methods')
    tol: float :
         (Default value = 1.e-3)
         Convergence tolerance for optimization (see scipy documentation)
    method_options: dict :
         (Default value = None)
         Dictionary of options
         (see scipy documentation)
    method_bounds: typing.Dict[typing.Hashable, typing.Tuple[float, float]]:
        (Default value = None)
        bounds for the variables (see scipy documentation)
    method_constraints :
         (Default value = None)
         (see scipy documentation)
    silent: bool :
         (Default value = False)
         No printout if True
    save_history: bool:
        (Default value = True)
        Save the history throughout the optimization

    Returns
    -------
    SciPyReturnType:
        The optimized energy, the optimized angles, the history and the raw scipy output.

    """

    # bring into right format
    variables = format_variable_list(variables)
    initial_values = format_variable_dictionary(initial_values)
    if isinstance(gradient, dict) or hasattr(gradient, "items"):
        gradient = format_variable_dictionary(gradient)
    if isinstance(hessian, dict) or hasattr(hessian, "items"):
        hessian = {(assign_variable(k[0]), assign_variable(k[1])): v for k, v in hessian.items()}
    method_bounds = format_variable_dictionary(method_bounds)

    # set defaults
    all_variables = objective.extract_variables()
    if variables is None:
        variables = all_variables
    if initial_values is None:
        initial_values = {k: numpy.random.uniform(0, 2 * numpy.pi) for k in all_variables}
    else:
        # autocomplete initial values, warn if you did
        detected = False
        for k in all_variables:
            if k not in initial_values:
                initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                detected = True
        if detected and not silent:
            print("WARNING: initial_variables given but not complete: Autocomplete with random number")

    optimizer = OptimizerSciPy(save_history=save_history,
                               maxiter=maxiter,
                               method=method,
                               method_options=method_options,
                               method_bounds=method_bounds,
                               method_constraints=method_constraints,
                               silent=silent,
                               tol=tol)
    if initial_values is not None:
        initial_values = {assign_variable(k): v for k, v in initial_values.items()}
    return optimizer(objective=objective, qng=qng,
                     backend=backend, backend_options=backend_options, gradient=gradient, hessian=hessian, initial_values=initial_values,
                     variables=variables, noise=noise,
                     samples=samples, *args, **kwargs)
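
For orientation, a minimal sketch of a typical call to the minimize entry point defined above, assuming tequila is importable as tq for building the objective (the circuit and variable names are illustrative):

# Sketch (assumed usage): minimize a one-qubit expectation value with BFGS.
import tequila as tq

a = tq.Variable("a")
U = tq.gates.Ry(target=0, angle=a)
E = tq.ExpectationValue(U=U, H=tq.paulis.Z(0))
result = minimize(objective=E, method="BFGS", initial_values={"a": 0.5})
print(result.energy)  # approaches -1 as the angle approaches pi
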
Example #6
    def __call__(self,
                 objective: Objective,
                 variables: typing.List[Variable] = None,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 gradient: typing.Dict[Variable, Objective] = None,
                 hessian: typing.Dict[typing.Tuple[Variable, Variable],
                                      Objective] = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> SciPyResults:
        """
        Perform optimization using scipy optimizers.

        Parameters
        ----------
        objective: Objective:
            the objective to optimize.
        variables: list, optional:
            the variables of objective to optimize. If None: optimize all.
        initial_values: dict, optional:
            a starting point from which to begin optimization. Will be generated if None.
        gradient: optional:
            Information or object used to calculate the gradient of objective. Defaults to None: get analytically.
        hessian: optional:
            Information or object used to calculate the hessian of objective. Defaults to None: get analytically.
        reset_history: bool: Default = True:
            whether or not to reset all history before optimizing.
        args
        kwargs

        Returns
        -------
        SciPyResults:
            the results of optimization.
        """

        objective = objective.contract()
        infostring = "{:15} : {}\n".format("Method", self.method)
        infostring += "{:15} : {} expectationvalues\n".format(
            "Objective", objective.count_expectationvalues())

        if gradient is not None:
            infostring += "{:15} : {}\n".format("grad instr", gradient)
        if hessian is not None:
            infostring += "{:15} : {}\n".format("hess_instr", hessian)

        if self.save_history and reset_history:
            self.reset_history()

        active_angles, passive_angles, variables = self.initialize_variables(
            objective, initial_values, variables)

        # Transform the initial value dictionary into (ordered) arrays
        param_keys, param_values = zip(*active_angles.items())
        param_values = numpy.array(param_values)

        # process and initialize scipy bounds
        bounds = None
        if self.method_bounds is not None:
            bounds = {k: None for k in active_angles}
            for k, v in self.method_bounds.items():
                if k in bounds:
                    bounds[k] = v
            infostring += "{:15} : {}\n".format("bounds", self.method_bounds)
            names, bounds = zip(*bounds.items())
        assert names == param_keys  # make sure the bounds are not shuffled

        # do the compilation here to avoid costly recompilation during the optimization
        compiled_objective = self.compile_objective(objective=objective,
                                                    *args,
                                                    **kwargs)
        E = _EvalContainer(objective=compiled_objective,
                           param_keys=param_keys,
                           samples=self.samples,
                           passive_angles=passive_angles,
                           save_history=self.save_history,
                           print_level=self.print_level)

        compile_gradient = self.method in (self.gradient_based_methods +
                                           self.hessian_based_methods)
        compile_hessian = self.method in self.hessian_based_methods

        dE = None
        ddE = None
        # detect if numerical gradients shall be used
        # switch off compiling if so
        if isinstance(gradient, str):
            if gradient.lower() == 'qng':
                compile_gradient = False
                if compile_hessian:
                    raise TequilaException(
                        'Sorry, QNG and hessian not yet tested together.')

                combos = get_qng_combos(objective,
                                        initial_values=initial_values,
                                        backend=self.backend,
                                        samples=self.samples,
                                        noise=self.noise)
                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   passive_angles=passive_angles)
                infostring += "{:15} : QNG {}\n".format("gradient", dE)
            else:
                dE = gradient
                compile_gradient = False
                if compile_hessian:
                    compile_hessian = False
                    if hessian is None:
                        hessian = gradient
                infostring += "{:15} : scipy numerical {}\n".format(
                    "gradient", dE)
                infostring += "{:15} : scipy numerical {}\n".format(
                    "hessian", ddE)

        if isinstance(gradient, dict):
            if gradient.get('method') == 'qng':
                func = gradient['function']
                compile_gradient = False
                if compile_hessian:
                    raise TequilaException(
                        'Sorry, QNG and hessian not yet tested together.')

                combos = get_qng_combos(objective,
                                        func=func,
                                        initial_values=initial_values,
                                        backend=self.backend,
                                        samples=self.samples,
                                        noise=self.noise)
                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   passive_angles=passive_angles)
                infostring += "{:15} : QNG {}\n".format("gradient", dE)

        if isinstance(hessian, str):
            ddE = hessian
            compile_hessian = False

        if compile_gradient:
            grad_obj, comp_grad_obj = self.compile_gradient(
                objective=objective,
                variables=variables,
                gradient=gradient,
                *args,
                **kwargs)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_grad_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "gradient", expvals)
            dE = _GradContainer(objective=comp_grad_obj,
                                param_keys=param_keys,
                                samples=self.samples,
                                passive_angles=passive_angles,
                                save_history=self.save_history,
                                print_level=self.print_level)
        if compile_hessian:
            hess_obj, comp_hess_obj = self.compile_hessian(
                variables=variables,
                hessian=hessian,
                grad_obj=grad_obj,
                comp_grad_obj=comp_grad_obj,
                *args,
                **kwargs)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_hess_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "hessian", expvals)
            ddE = _HessContainer(objective=comp_hess_obj,
                                 param_keys=param_keys,
                                 samples=self.samples,
                                 passive_angles=passive_angles,
                                 save_history=self.save_history,
                                 print_level=self.print_level)
        if self.print_level > 0:
            print(self)
            print(infostring)
            print("{:15} : {}\n".format("active variables",
                                        len(active_angles)))

        Es = []

        optimizer_instance = self

        class SciPyCallback:
            energies = []
            gradients = []
            hessians = []
            angles = []
            real_iterations = 0

            def __call__(self, *args, **kwargs):
                self.energies.append(E.history[-1])
                self.angles.append(E.history_angles[-1])
                if dE is not None and not isinstance(dE, str):
                    self.gradients.append(dE.history[-1])
                if ddE is not None and not isinstance(ddE, str):
                    self.hessians.append(ddE.history[-1])
                self.real_iterations += 1
                if 'callback' in optimizer_instance.kwargs:
                    optimizer_instance.kwargs['callback'](E.history_angles[-1])

        callback = SciPyCallback()
        res = scipy.optimize.minimize(E,
                                      x0=param_values,
                                      jac=dE,
                                      hess=ddE,
                                      args=(Es, ),
                                      method=self.method,
                                      tol=self.tol,
                                      bounds=bounds,
                                      constraints=self.method_constraints,
                                      options=self.method_options,
                                      callback=callback)

        # failsafe since callback is not implemented everywhere
        if callback.real_iterations == 0:
            real_iterations = range(len(E.history))

        if self.save_history:
            self.history.energies = callback.energies
            self.history.energy_evaluations = E.history
            self.history.angles = callback.angles
            self.history.angles_evaluations = E.history_angles
            self.history.gradients = callback.gradients
            self.history.hessians = callback.hessians
            if dE is not None and not isinstance(dE, str):
                self.history.gradients_evaluations = dE.history
            if ddE is not None and not isinstance(ddE, str):
                self.history.hessians_evaluations = ddE.history

            # some methods like "cobyla" do not support callback functions
            if len(self.history.energies) == 0:
                self.history.energies = E.history
                self.history.angles = E.history_angles

        # some scipy methods always give back the last value and not the minimum (e.g. cobyla)
        ea = sorted(zip(E.history, E.history_angles), key=lambda x: x[0])
        E_final = ea[0][0]
        angles_final = ea[0][1]
        angles_final = {**angles_final, **passive_angles}

        return SciPyResults(energy=E_final,
                            history=self.history,
                            variables=format_variable_dictionary(angles_final),
                            scipy_result=res)
Example #7
    def prepare(self, objective: Objective, initial_values: dict = None,
                variables: list = None, gradient=None):
        """
        perform all initialization for an objective, register it with lookup tables, and return it compiled.
        MUST be called before step is used.

        Parameters
        ----------
        objective: Objective:
            the objective to ready for optimization.
        initial_values: dict, optional:
            the initial values with which to prepare the optimizer.
            Default: choose randomly.
        variables: list, optional:
            which variables to optimize over, and hence prepare gradients for.
            Default value: optimize over all variables in objective.
        gradient: optional:
            extra keyword; information used to compile alternate gradients.
            Default: prepare the standard, analytical gradient.

        Returns
        -------
        Objective:
            compiled version of objective.
        """

        active_angles, passive_angles, variables = self.initialize_variables(objective, initial_values, variables)
        comp = self.compile_objective(objective=objective)
        for arg in comp.args:
            if hasattr(arg, 'U'):
                if arg.U.device is not None:
                    # don't retrieve the device repeatedly; pyquil errors out if this happens!
                    self.device = arg.U.device
                    break


        compile_gradient = True

        dE = None
        if isinstance(gradient, str):
            if gradient.lower() == 'qng':
                compile_gradient = False

                combos = get_qng_combos(objective, initial_values=initial_values, backend=self.backend,
                                        device=self.device,
                                        samples=self.samples, noise=self.noise,
                                        )
                dE = QNGVector(combos)
            else:
                gradient = {"method": gradient, "stepsize": 1.e-4}

        elif isinstance(gradient, dict):
            if gradient['method'] == 'qng':
                func = gradient['function']
                compile_gradient = False
                combos = get_qng_combos(objective, func=func, initial_values=initial_values, backend=self.backend,
                                        device=self.device,
                                        samples=self.samples, noise=self.noise)
                dE = QNGVector(combos)

        if compile_gradient:
            grad_obj, comp_grad_obj = self.compile_gradient(objective=objective, variables=variables, gradient=gradient)
            dE = CallableVector([comp_grad_obj[k] for k in comp_grad_obj.keys()])

        ostring = id(comp)
        if not self.silent:
            print(self)
            print("{:15} : {} expectationvalues".format("Objective", objective.count_expectationvalues()))
            if compile_gradient:
                counts = [x.count_expectationvalues() for x in comp_grad_obj.values()]
                print("{:15} : {} expectationvalues".format("Gradient", sum(counts)))
                print("{:15} : {}".format("gradient instr", gradient))
            print("{:15} : {}".format("active variables", len(active_angles)))

        vec_len = len(active_angles)
        first = numpy.zeros(vec_len)
        second = numpy.zeros(vec_len)
        self.gradient_lookup[ostring] = dE
        self.active_key_lookup[ostring] = active_angles.keys()
        self.moments_lookup[ostring] = (first, second)
        self.moments_trajectory[ostring] = [(first, second)]
        self.step_lookup[ostring] = 0
        return comp
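
The (first, second) arrays registered in moments_lookup are the running first and second moments that momentum-style updates consume. A generic sketch of the Adam update those moments feed (standard Adam; the optimizer's own step may differ in details such as hyperparameter names):

# Generic Adam-style moment update (a sketch, not the library's exact code).
import numpy as np

def adam_step(params, grads, first, second, t, lr=0.01,
              beta=0.9, beta2=0.999, eps=1e-8):
    first = beta * first + (1 - beta) * grads            # first moment
    second = beta2 * second + (1 - beta2) * grads ** 2   # second moment
    m_hat = first / (1 - beta ** t)                      # bias correction
    v_hat = second / (1 - beta2 ** t)
    params = params - lr * m_hat / (np.sqrt(v_hat) + eps)
    return params, first, second
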
Example #8
    def __call__(self,
                 objective: Objective,
                 variables: typing.List[Variable] = None,
                 initial_values: typing.Dict[Variable, numbers.Real] = None,
                 gradient: typing.Dict[Variable, Objective] = None,
                 hessian: typing.Dict[typing.Tuple[Variable, Variable],
                                      Objective] = None,
                 reset_history: bool = True,
                 *args,
                 **kwargs) -> SciPyReturnType:
        """
        Optimizes with scipy and gives back the optimized angles.
        The optimized energies over the history can be retrieved afterwards.
        :param objective: The tequila Objective to minimize
        :param initial_values: initial values for the objective
        :param return_scipy_output: choose if the full scipy output shall be returned
        :param reset_history: reset the history before optimization starts (has no effect if self.save_history is False)
        :return: tuple of optimized energy, optimized angles and scipy output
        """

        infostring = "{:15} : {}\n".format("Method", self.method)
        infostring += "{:15} : {} expectationvalues\n".format(
            "Objective", objective.count_expectationvalues())

        if gradient is not None:
            infostring += "{:15} : {}\n".format("grad instr", gradient)
        if hessian is not None:
            infostring += "{:15} : {}\n".format("hess_instr", hessian)

        if self.save_history and reset_history:
            self.reset_history()

        active_angles, passive_angles, variables = self.initialize_variables(
            objective, initial_values, variables)

        # Transform the initial value dictionary into (ordered) arrays
        param_keys, param_values = zip(*active_angles.items())
        param_values = numpy.array(param_values)

        # process and initialize scipy bounds
        bounds = None
        if self.method_bounds is not None:
            bounds = {k: None for k in active_angles}
            for k, v in self.method_bounds.items():
                if k in bounds:
                    bounds[k] = v
            infostring += "{:15} : {}\n".format("bounds", self.method_bounds)
            names, bounds = zip(*bounds.items())
        assert names == param_keys  # make sure the bounds are not shuffled

        # do the compilation here to avoid costly recompilation during the optimization
        compiled_objective = self.compile_objective(objective=objective)
        E = _EvalContainer(objective=compiled_objective,
                           param_keys=param_keys,
                           samples=self.samples,
                           passive_angles=passive_angles,
                           save_history=self.save_history,
                           backend_options=self.backend_options,
                           print_level=self.print_level)

        compile_gradient = self.method in (self.gradient_based_methods +
                                           self.hessian_based_methods)
        compile_hessian = self.method in self.hessian_based_methods

        dE = None
        ddE = None
        # detect if numerical gradients shall be used
        # switch off compiling if so
        if isinstance(gradient, str):
            if gradient.lower() == 'qng':
                compile_gradient = False
                if compile_hessian:
                    raise TequilaException(
                        'Sorry, QNG and hessian not yet tested together.')

                combos = get_qng_combos(objective,
                                        initial_values=initial_values,
                                        backend=self.backend,
                                        samples=self.samples,
                                        noise=self.noise,
                                        backend_options=self.backend_options)
                dE = _QngContainer(combos=combos,
                                   param_keys=param_keys,
                                   passive_angles=passive_angles)
                infostring += "{:15} : QNG {}\n".format("gradient", dE)
            else:
                dE = gradient
                compile_gradient = False
                if compile_hessian:
                    compile_hessian = False
                    if hessian is None:
                        hessian = gradient
                infostring += "{:15} : scipy numerical {}\n".format(
                    "gradient", dE)
                infostring += "{:15} : scipy numerical {}\n".format(
                    "hessian", ddE)

        if isinstance(hessian, str):
            ddE = hessian
            compile_hessian = False

        if compile_gradient:
            grad_obj, comp_grad_obj = self.compile_gradient(
                objective=objective, variables=variables, gradient=gradient)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_grad_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "gradient", expvals)
            dE = _GradContainer(objective=comp_grad_obj,
                                param_keys=param_keys,
                                samples=self.samples,
                                passive_angles=passive_angles,
                                save_history=self.save_history,
                                print_level=self.print_level,
                                backend_options=self.backend_options)

        if compile_hessian:
            hess_obj, comp_hess_obj = self.compile_hessian(
                variables=variables,
                hessian=hessian,
                grad_obj=grad_obj,
                comp_grad_obj=comp_grad_obj)
            expvals = sum(
                [o.count_expectationvalues() for o in comp_hess_obj.values()])
            infostring += "{:15} : {} expectationvalues\n".format(
                "hessian", expvals)
            ddE = _HessContainer(objective=comp_hess_obj,
                                 param_keys=param_keys,
                                 samples=self.samples,
                                 passive_angles=passive_angles,
                                 save_history=self.save_history,
                                 print_level=self.print_level,
                                 backend_options=self.backend_options)

        if self.print_level > 0:
            print(self)
            print(infostring)
            print("{:15} : {}\n".format("active variables",
                                        len(active_angles)))

        Es = []

        class SciPyCallback:
            energies = []
            gradients = []
            hessians = []
            angles = []
            real_iterations = 0

            def __call__(self, *args, **kwargs):
                self.energies.append(E.history[-1])
                self.angles.append(E.history_angles[-1])
                if dE is not None and not isinstance(dE, str):
                    self.gradients.append(dE.history[-1])
                if ddE is not None and not isinstance(ddE, str):
                    self.hessians.append(ddE.history[-1])
                self.real_iterations += 1

        callback = SciPyCallback()
        res = scipy.optimize.minimize(E,
                                      x0=param_values,
                                      jac=dE,
                                      hess=ddE,
                                      args=(Es, ),
                                      method=self.method,
                                      tol=self.tol,
                                      bounds=bounds,
                                      constraints=self.method_constraints,
                                      options=self.method_options,
                                      callback=callback)

        # failsafe since callback is not implemented everywhere
        if callback.real_iterations == 0:
            real_iterations = range(len(E.history))

        if self.save_history:
            self.history.energies = callback.energies
            self.history.energy_evaluations = E.history
            self.history.angles = callback.angles
            self.history.angles_evaluations = E.history_angles
            self.history.gradients = callback.gradients
            self.history.hessians = callback.hessians
            if dE is not None and not isinstance(dE, str):
                self.history.gradients_evaluations = dE.history
            if ddE is not None and not isinstance(ddE, str):
                self.history.hessians_evaluations = ddE.history

            # some methods like "cobyla" do not support callback functions
            if len(self.history.energies) == 0:
                self.history.energies = E.history
                self.history.angles = E.history_angles

        E_final = res.fun
        angles_final = dict(
            (param_keys[i], res.x[i]) for i in range(len(param_keys)))
        angles_final = {**angles_final, **passive_angles}

        return SciPyReturnType(energy=E_final,
                               angles=format_variable_dictionary(angles_final),
                               history=self.history,
                               scipy_output=res)
Example #9
def minimize(objective: Objective,
             lr=0.01,
             method='sgd',
             qng: bool = False,
             stop_count=None,
             initial_values: typing.Dict[typing.Hashable, numbers.Real] = None,
             variables: typing.List[typing.Hashable] = None,
             samples: int = None,
             maxiter: int = 100,
             backend: str = None,
             noise: NoiseModel = None,
             silent: bool = False,
             save_history: bool = True,
             *args,
             **kwargs) -> GDReturnType:
    """

    Parameters
    ----------
    objective: Objective :
        The tequila objective to optimize
    lr: float >0:
        the learning rate. Default 0.01.
    method: string:
        which variation on gradient descent to use. Options include 'sgd', 'adam', 'nesterov', 'adagrad' and 'rmsprop'.
    qng: bool:
        whether the gradient calculated should be the quantum natural gradient. Defaults to False.
    stop_count: int:
        how many steps without improvement to allow before training ceases. Default None: run until maxiter is complete.
    initial_values: typing.Dict[typing.Hashable, numbers.Real]: (Default value = None):
        Initial values as dictionary of Hashable types (variable keys) and floating point numbers. If None, they are initialized with random values in [0, 2*pi)
    variables: typing.List[typing.Hashable] :
         (Default value = None)
         List of Variables to optimize
    samples: int :
         (Default value = None)
         samples/shots to take in every run of the quantum circuits (None activates full wavefunction simulation)
    maxiter: int :
         (Default value = 100)
    backend: str :
         (Default value = None)
         Simulator backend, will be automatically chosen if set to None
    noise: NoiseModel:
         (Default value = None)
         a NoiseModel to apply to all expectation values in the objective.
    silent: bool :
         (Default value = False)
         No printout if True
    save_history: bool:
        (Default value = True)
        Save the history throughout the optimization


    Optional kwargs may include beta, beta2, and rho: hyperparameters of the individual methods which affect (but need not be altered for) their update rules.
    Returns
    -------
    GDReturnType:
        The optimization result (optimized energy, angles and history).

    """

    # bring into right format
    variables = format_variable_list(variables)
    initial_values = format_variable_dictionary(initial_values)

    # set defaults
    all_variables = objective.extract_variables()
    if variables is None:
        variables = all_variables
    if initial_values is None:
        initial_values = {
            k: numpy.random.uniform(0, 2 * numpy.pi)
            for k in all_variables
        }
    else:
        # autocomplete initial values, warn if you did
        detected = False
        for k in all_variables:
            if k not in initial_values:
                initial_values[k] = numpy.random.uniform(0, 2 * numpy.pi)
                detected = True
        if detected and not silent:
            print(
                "WARNING: initial_variables given but not complete: Autocomplete with random number"
            )

    optimizer = OptimizerGD(save_history=save_history,
                            maxiter=maxiter,
                            silent=silent)
    if initial_values is not None:
        initial_values = {
            assign_variable(k): v
            for k, v in initial_values.items()
        }
    return optimizer(objective=objective,
                     maxiter=maxiter,
                     lr=lr,
                     method=method,
                     qng=qng,
                     stop_count=stop_count,
                     backend=backend,
                     initial_values=initial_values,
                     variables=variables,
                     noise=noise,
                     samples=samples,
                     *args,
                     **kwargs)
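
As with the scipy entry point, a minimal sketch of calling this minimize, again assuming tequila as tq for building the objective (names are illustrative; result.energy assumes GDReturnType exposes the optimized energy):

# Sketch (assumed usage): gradient descent with the Adam variant.
import tequila as tq

a = tq.Variable("a")
E = tq.ExpectationValue(U=tq.gates.Ry(target=0, angle=a), H=tq.paulis.Z(0))
result = minimize(objective=E, method="adam", lr=0.1, maxiter=50,
                  initial_values={"a": 0.5})
print(result.energy)
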