# Example 1
def __grad_vector_objective(objective: typing.Union[Objective,
                                                    VectorObjective],
                            variable: Variable):
    """Differentiate each entry of a (vector-)objective w.r.t. a single variable.

    Applies the chain rule per entry: for every argument of the entry, the
    outer derivative of the transformation (via jax/autograd) is multiplied
    by the inner derivative of the argument (via ``__grad_inner``) and the
    products are summed.

    Parameters
    ----------
    objective:
        the Objective or VectorObjective whose derivative is sought.
    variable:
        the Variable with respect to which to differentiate.

    Returns
    -------
    Objective or list of Objective:
        one derivative Objective per entry, in entry order; unwrapped to a
        single Objective when there is exactly one entry.

    Raises
    ------
    TequilaException:
        if neither the jax nor the autograd backend is available.
    """
    argsets = objective.argsets
    transformations = objective._transformations
    outputs = []
    for pos in range(len(objective)):
        args = argsets[pos]
        transformation = transformations[pos]
        dO = None

        # cache inner derivatives of expectation values so repeated
        # arguments are only differentiated once
        processed_expectationvalues = {}
        for i, arg in enumerate(args):
            # validate the autodiff backend before doing any work
            if __AUTOGRAD__BACKEND__ not in ("jax", "autograd"):
                raise TequilaException(
                    "Can't differentiate without autograd or jax")

            # We can detect one simple case where the outer derivative is const=1.
            # In that case skip building the grad entirely — the original code
            # called jax.grad on the transformation first, which is wasted work
            # and fragile when the transformation is None.
            if transformation is None or transformation == identity:
                outer = 1.0
            else:
                # jax and autograd spell the argument-index keyword differently
                if __AUTOGRAD__BACKEND__ == "jax":
                    df = jax.grad(transformation, argnums=i)
                else:
                    df = jax.grad(transformation, argnum=i)
                outer = Objective(args=args, transformation=df)

            if hasattr(arg, "U"):
                # save redundancies
                if arg in processed_expectationvalues:
                    inner = processed_expectationvalues[arg]
                else:
                    inner = __grad_inner(arg=arg, variable=variable)
                    processed_expectationvalues[arg] = inner
            else:
                # this means this inner derivative is purely variable dependent
                inner = __grad_inner(arg=arg, variable=variable)

            if inner == 0.0:
                # don't pile up zero expectationvalues
                continue

            # chain rule: accumulate outer * inner over all arguments
            if dO is None:
                dO = outer * inner
            else:
                dO = dO + outer * inner

        if dO is None:
            dO = Objective()
        outputs.append(dO)
    if len(outputs) == 1:
        return outputs[0]
    return outputs
# Example 2
def __grad_objective(objective: Objective, variable: Variable):
    """Differentiate a scalar Objective with respect to a single variable.

    Applies the chain rule over the objective's arguments: the outer
    derivative of the transformation (via jax/autograd) times the inner
    derivative of each argument (via ``__grad_inner``), summed.

    Parameters
    ----------
    objective:
        the Objective whose derivative is sought.
    variable:
        the Variable with respect to which to differentiate.

    Returns
    -------
    Objective:
        the derivative objective.

    Raises
    ------
    TequilaException:
        if no autodiff backend is available, or if the derivative
        unexpectedly evaluates to None.
    """
    args = objective.args
    transformation = objective.transformation
    dO = None

    # cache inner derivatives of expectation values so repeated
    # arguments are only differentiated once
    processed_expectationvalues = {}
    for i, arg in enumerate(args):
        # validate the autodiff backend before doing any work
        if __AUTOGRAD__BACKEND__ not in ("jax", "autograd"):
            raise TequilaException(
                "Can't differentiate without autograd or jax")

        # We can detect one simple case where the outer derivative is const=1.
        # Skip building the grad in that case — the original code called
        # jax.grad on the transformation first, which builds a grad of None
        # when the transformation is absent.
        if transformation is None:
            outer = 1.0
        else:
            # jax and autograd spell the argument-index keyword differently
            if __AUTOGRAD__BACKEND__ == "jax":
                df = jax.grad(transformation, argnums=i)
            else:
                df = jax.grad(transformation, argnum=i)
            outer = Objective(args=args, transformation=df)

        if hasattr(arg, "U"):
            # save redundancies
            if arg in processed_expectationvalues:
                inner = processed_expectationvalues[arg]
            else:
                inner = __grad_inner(arg=arg, variable=variable)
                processed_expectationvalues[arg] = inner
        else:
            # this means this inner derivative is purely variable dependent
            inner = __grad_inner(arg=arg, variable=variable)

        if inner == 0.0:
            # don't pile up zero expectationvalues
            continue

        # chain rule: accumulate outer * inner over all arguments
        if dO is None:
            dO = outer * inner
        else:
            dO = dO + outer * inner

    if dO is None:
        raise TequilaException("caught None in __grad_objective")
    return dO
# Example 3
def get_qng_combos(objective,
                   func=stokes_block,
                   initial_values=None,
                   samples=None,
                   backend=None,
                   device=None,
                   noise=None) -> typing.List[typing.Dict]:
    """
    get all the objects needed to evaluate the qng for some objective; return them in a list of dictionaries.

    Parameters
    ----------
    objective: Objective:
        the Objective whose qng is sought.
    func: callable: (Default = stokes_block):
        the function used to obtain the (blocks of) the qgt. Default uses stokes_block, defined above.
    initial_values: dict, optional:
        a dictionary indicating the intial parameters with which to compile all objectives appearing in the qng.
    samples: int, optional:
        the number of samples with which to compile all objectives appearing in the qng. Default: none.
    backend: str, optional:
        the backend with which to compile all objectives appearing in the qng. default: pick for you.
    device: optional:
        the device with which to compile all objectives appearing in the qng. Default: no device use or emulation.
    noise: str or NoiseModel, optional:
        the noise model with which to compile all objectives appearing in the qng. Default: no noise.

    Returns
    -------
    list of dicts:
        a list of dictionaries, each entry corresponding to the qng for 1 argument of objective, in the order
        of said objectives.

    """

    combos = []
    # renamed from `vars` to avoid shadowing the builtin
    variables = objective.extract_variables()
    # lower the objective's circuits to primitive gates before differentiation
    compiled = compile_multitarget(gate=objective)
    compiled = compile_trotterized_gate(gate=compiled)
    compiled = compile_h_power(gate=compiled)
    compiled = compile_power_gate(gate=compiled)
    compiled = compile_controlled_phase(gate=compiled)
    compiled = compile_controlled_rotation(gate=compiled)
    for i, arg in enumerate(compiled.args):
        if not isinstance(arg, ExpectationValueImpl):
            ### this is a variable, no QNG involved
            mat = QngMatrix([[[1]]])
            vec = CallableVector([__grad_inner(arg, arg)])
            mapping = {0: {v: __grad_inner(arg, v) for v in variables}}
        else:
            ### if the arg is an expectationvalue, we need to build some qngs and mappings!
            blocks = func(arg,
                          initial_values=initial_values,
                          samples=samples,
                          device=device,
                          backend=backend,
                          noise=noise)
            mat = QngMatrix(blocks)

            vec = subvector_procedure(arg,
                                      initial_values=initial_values,
                                      samples=samples,
                                      device=device,
                                      backend=backend,
                                      noise=noise)

            # map each internal parameter of the circuit to the derivative
            # of that parameter w.r.t. every variable it depends on
            mapping = {}
            self_pars = get_self_pars(arg.U)
            for j, p in enumerate(self_pars):
                indict = {}
                for v in p.extract_variables():
                    gi = __grad_inner(p, v)
                    if isinstance(gi, Objective):
                        g = compile_objective(gi,
                                              variables=initial_values,
                                              samples=samples,
                                              device=device,
                                              backend=backend,
                                              noise=noise)
                    else:
                        g = gi
                    indict[v] = g
                mapping[j] = indict

        # outer derivative of the transformation w.r.t. argument i
        # (explicit keyword for clarity; positionally identical)
        posarg = jax.grad(compiled.transformation, argnums=i)

        p = Objective(compiled.args, transformation=posarg)

        pos = compile_objective(p,
                                variables=initial_values,
                                samples=samples,
                                device=device,
                                backend=backend,
                                noise=noise)
        combos.append(qng_dict(arg, mat, vec, mapping, pos))
    return combos
# Example 4
def get_qng_combos(objective,
                   initial_values=None,
                   samples=None,
                   backend=None,
                   backend_options=None,
                   noise=None):
    """
    get all the objects needed to evaluate the qng for some objective; return them in a list of dictionaries.

    Parameters
    ----------
    objective: Objective:
        the Objective whose qng is sought.
    initial_values: dict, optional:
        a dictionary indicating the initial parameters with which to compile all objectives appearing in the qng.
    samples: int, optional:
        the number of samples with which to compile all objectives appearing in the qng. Default: none.
    backend: str, optional:
        the backend with which to compile all objectives appearing in the qng. Default: pick for you.
    backend_options: optional:
        extra backend options forwarded to every compilation in the qng.
    noise: str or NoiseModel, optional:
        the noise model with which to compile all objectives appearing in the qng. Default: no noise.

    Returns
    -------
    list of dicts:
        a list of dictionaries, each entry corresponding to the qng for 1 argument of objective, in the order
        of said objectives.
    """
    combos = []
    # renamed from `vars` to avoid shadowing the builtin
    variables = objective.extract_variables()
    # lower the objective's circuits to primitive gates before differentiation
    compiled = compile_multitarget(gate=objective)
    compiled = compile_trotterized_gate(gate=compiled)
    compiled = compile_h_power(gate=compiled)
    compiled = compile_power_gate(gate=compiled)
    compiled = compile_controlled_phase(gate=compiled)
    compiled = compile_controlled_rotation(gate=compiled)
    for i, arg in enumerate(compiled.args):
        if not isinstance(arg, ExpectationValueImpl):
            ### this is a variable, no QNG involved
            mat = QngMatrix([[[1]]])
            vec = CallableVector([__grad_inner(arg, arg)])
            mapping = {0: {v: __grad_inner(arg, v) for v in variables}}
        else:
            ### if the arg is an expectationvalue, we need to build some qngs and mappings!
            blocks = qng_metric_tensor_blocks(arg,
                                              initial_values=initial_values,
                                              samples=samples,
                                              backend=backend,
                                              noise=noise,
                                              backend_options=backend_options)
            mat = QngMatrix(blocks)

            vec = subvector_procedure(arg,
                                      initial_values=initial_values,
                                      samples=samples,
                                      backend=backend,
                                      noise=noise,
                                      backend_options=backend_options)

            # map each internal parameter of the circuit to the derivative
            # of that parameter w.r.t. every variable it depends on
            mapping = {}
            self_pars = get_self_pars(arg.U)
            for j, p in enumerate(self_pars):
                indict = {}
                for v in p.extract_variables():
                    gi = __grad_inner(p, v)
                    if isinstance(gi, Objective):
                        g = compile_objective(gi,
                                              variables=initial_values,
                                              samples=samples,
                                              backend=backend,
                                              noise=noise,
                                              backend_options=backend_options)
                    else:
                        g = gi
                    indict[v] = g
                mapping[j] = indict

        # outer derivative of the transformation w.r.t. argument i
        posarg = jax.grad(compiled.transformation, argnums=i)
        p = Objective(compiled.args, transformation=posarg)

        pos = compile_objective(p,
                                variables=initial_values,
                                samples=samples,
                                backend=backend,
                                noise=noise,
                                backend_options=backend_options)
        combos.append(qng_dict(arg, mat, vec, mapping, pos))
    return combos