Example #1
# Assumed module-level imports for this snippet (not shown on this page):
from autograd import jacobian as _jacobian
from pennylane import numpy as np  # assumed source of `np`; may differ by version

def jacobian(func, argnum=None):
    """Returns the Jacobian as a callable function of vector-valued
    (functions of) QNodes.

    This is a wrapper around the :func:`autograd.jacobian` function.

    Args:
        func (function): A vector-valued Python function or QNode that contains
            a combination of quantum and classical nodes. The output of the computation
            must consist of a single NumPy array (if classical) or a tuple of
            expectation values (if a quantum node)
        argnum (int or Sequence[int]): Which argument to take the gradient
            with respect to. If a sequence is given, the Jacobian matrix
            corresponding to all input elements and all output elements is returned.

    Returns:
        function: the function that returns the Jacobian of the input
        function with respect to the arguments in argnum
    """
    # pylint: disable=no-value-for-parameter

    if argnum is not None:
        # for backwards compatibility with existing code
        # that manually specifies argnum
        if isinstance(argnum, int):
            return _jacobian(func, argnum)

        return lambda *args, **kwargs: np.stack(
            [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]).T

    def _jacobian_function(*args, **kwargs):
        """Inspect the arguments for differentiability, and
        compute the autograd gradient function with required argnums
        dynamically.

        This wrapper function is returned to the user instead of autograd.jacobian,
        so that we can take into account cases where the user computes the
        jacobian function once, but then calls it with arguments that change
        in differentiability.
        """
        argnum = []

        for idx, arg in enumerate(args):
            # Arguments without a requires_grad attribute default to trainable
            if getattr(arg, "requires_grad", True):
                argnum.append(idx)

        if not argnum:
            return tuple()

        if len(argnum) == 1:
            return _jacobian(func, argnum[0])(*args, **kwargs)

        return np.stack(
            [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]).T

    return _jacobian_function
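
A minimal usage sketch for the snippet above, run after the definition (f and x are illustrative names; plain NumPy arrays have no requires_grad attribute, so the getattr default marks them trainable):

import autograd.numpy as np

def f(x):
    # Vector-valued function R^2 -> R^2
    return np.array([x[0] * x[1], np.sin(x[1])])

x = np.array([1.0, 2.0])

# With argnum=None, the single trainable argument is detected and the
# call reduces to _jacobian(f, 0)(x).
print(jacobian(f)(x))
# [[ 2.         1.       ]
#  [ 0.        -0.4161468]]   (cos(2.0) ≈ -0.4161468)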
Example #2
    # Nested inside jacobian(func, argnum=None): `func` and `_jacobian` come
    # from the enclosing scope. Assumed module-level imports (not shown here):
    #   import warnings
    #   from autograd import jacobian as _jacobian
    #   from autograd.numpy.numpy_boxes import ArrayBox
    def _jacobian_function(*args, **kwargs):
        """Inspect the arguments for differentiability, and
        compute the autograd gradient function with required argnums
        dynamically.

        This wrapper function is returned to the user instead of autograd.jacobian,
        so that we can take into account cases where the user computes the
        jacobian function once, but then calls it with arguments that change
        in differentiability.
        """
        argnum = []

        for idx, arg in enumerate(args):

            trainable = getattr(arg, "requires_grad", None)
            array_box = isinstance(arg, ArrayBox)

            if trainable is None and not array_box:

                warnings.warn(
                    "Starting with PennyLane v0.20.0, when using Autograd, inputs "
                    "have to explicitly specify requires_grad=True (or the "
                    "argnum argument must be passed) in order for trainable parameters to be "
                    "identified.",
                    UserWarning,
                )

            if trainable is None:
                trainable = True

            if trainable:
                argnum.append(idx)

        if not argnum:
            return tuple()

        if len(argnum) == 1:
            return _jacobian(func, argnum[0])(*args, **kwargs)

        jacobians = [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]

        try:
            return np.stack(jacobians).T
        except ValueError:
            # The Jacobian of each argument is a different shape and cannot
            # be stacked; simply return the tuple of argument Jacobians.
            return tuple(jacobians)
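
A sketch of the ragged case the except branch handles, assuming the snippet above is nested inside jacobian(func, argnum=None) as in Example #8 (g, x, and y are illustrative):

from pennylane import numpy as pnp

def g(x, y):
    # Output shape (2,): x scaled by the scalar sum of y
    return x * pnp.sum(y)

x = pnp.array([1.0, 2.0], requires_grad=True)
y = pnp.array([0.5, 1.0, 1.5], requires_grad=True)

# The Jacobian w.r.t. x has shape (2, 2) but w.r.t. y has shape (2, 3);
# np.stack raises ValueError, so the tuple fallback returns one Jacobian
# per trainable argument.
jac_x, jac_y = jacobian(g)(x, y)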
Example #3
    # Nested inside jacobian(func, argnum): `func`, `_jacobian`, and `_np`
    # (assumed to be autograd's NumPy module) come from the enclosing scope.
    def _jacobian_function(*args, **kwargs):
        """Inspect the arguments for differentiability, and
        compute the autograd gradient function with required argnums
        dynamically.

        This wrapper function is returned to the user instead of autograd.jacobian,
        so that we can take into account cases where the user computes the
        jacobian function once, but then calls it with arguments that change
        in differentiability.
        """
        argnum = []

        for idx, arg in enumerate(args):
            if getattr(arg, "requires_grad", True):
                argnum.append(idx)

        if len(argnum) == 1:
            return _jacobian(func, argnum[0])(*args, **kwargs)

        # Note: unlike Example #2, there is no guard for an empty argnum here,
        # so _np.stack([]) raises ValueError when no argument is trainable.
        return _np.stack(
            [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]).T
Example #4
# Assumed module-level imports for this snippet (not shown on this page):
from autograd import jacobian as _jacobian
import autograd.numpy as numpy  # assumed source of the `numpy` name below

def jacobian(func, argnum):
    """Returns the Jacobian as a callable function of vector-valued
    (functions of) QNodes.

    This is a wrapper around the :func:`autograd.jacobian` function.

    Args:
        func (function): a vector-valued Python function or QNode that contains
            a combination of quantum and classical nodes. The output of the computation
            must consist of a single NumPy array (if classical) or a tuple of
            expectation values (if a quantum node)
        argnum (int or Sequence[int]): which argument to take the gradient
            with respect to. If a sequence is given, the Jacobian matrix
            corresponding to all input elements and all output elements is returned.

    Returns:
        function: the function that returns the Jacobian of the input
        function with respect to the arguments in argnum
    """
    # pylint: disable=no-value-for-parameter
    if isinstance(argnum, int):
        return _jacobian(func, argnum)
    return lambda *args, **kwargs: numpy.stack(
        [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]).T
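
A usage sketch for the sequence form, run after the definition above (h and its arguments are illustrative):

def h(a, b):
    return numpy.array([a * b, a + b])

# One Jacobian per listed argument, stacked and transposed; for two
# scalar arguments and a length-2 output, the result has shape (2, 2).
jac_fn = jacobian(h, argnum=[0, 1])
print(jac_fn(0.5, 2.0))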
Example #5
from autograd import jacobian as _jacobian  # assumed import, not shown on this page

def jacobian(func):
    """Wrap autograd's jacobian function.

    Parameters
    ----------
    func : callable
        Function whose Jacobian is computed.

    Returns
    -------
    _ : callable
        Function taking x as input and returning
        the Jacobian of func at x.
    """
    return _jacobian(func)
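
A minimal usage sketch, run after the definition above; this wrapper always differentiates with respect to the first positional argument (func and the input are illustrative):

import autograd.numpy as np

def func(x):
    return np.array([x[0] ** 2, x[0] * x[1]])

print(jacobian(func)(np.array([3.0, 4.0])))
# [[6. 0.]
#  [4. 3.]]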
Example #6
from autograd import jacobian as _jacobian  # assumed import, not shown on this page

def jacobian(func, argnum):
    """Returns the Jacobian (as a callable function) of vector-valued
    functions accessible within PennyLane.

    This is a wrapper around the :func:`autograd.jacobian` function.

    Args:
        func (function): a vector-valued Python function or QNode that contains
            a combination of quantum and classical nodes. The output of the computation
            must consist of a single NumPy array (if classical) or a tuple of
            expectation values (if a quantum node)
        argnum (int): which argument to take the gradient
            with respect to. If the argument is a NumPy array, then the Jacobian
            corresponding to all input elements and all output elements is returned.

    Returns:
        function: the function that returns the Jacobian of the input
        function with respect to the arguments in argnum
    """
    # pylint: disable=no-value-for-parameter
    return _jacobian(func, argnum)
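
A usage sketch for the single-integer form, run after the definition above (f, weights, and data are illustrative):

import autograd.numpy as np

def f(weights, data):
    return np.tanh(np.dot(data, weights))

weights = np.array([0.1, 0.2])
data = np.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])

# Differentiate only with respect to argument 0 (weights); the result has
# shape (3, 2): one row per output element, one column per weight.
print(jacobian(f, 0)(weights, data))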
Example #7
    # Nested inside jacobian(func, argnum=None): `func`, `argnum`, `_jacobian`,
    # and `_get_argnum` come from the enclosing scope (see the sketch after
    # this snippet).
    def _jacobian_function(*args, **kwargs):
        """Compute the autograd Jacobian.

        This wrapper function is returned to the user instead of autograd.jacobian,
        so that we can take into account cases where the user computes the
        jacobian function once, but then calls it with arguments that change
        in differentiability.
        """
        if argnum is None:
            # Infer which arguments to consider trainable
            _argnum = _get_argnum(args)
            # Infer whether to unpack from the inferred argnum
            unpack = len(_argnum) == 1
        else:
            # For a single integer as argnum, unpack the Jacobian tuple
            unpack = isinstance(argnum, int)
            _argnum = [argnum] if unpack else argnum

        jac = tuple(_jacobian(func, arg)(*args, **kwargs) for arg in _argnum)

        return jac[0] if unpack else jac
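
The helper _get_argnum is not shown on this page; a minimal sketch consistent with the trainability checks in the earlier examples might look as follows (an illustration, not the library's actual implementation):

def _get_argnum(args):
    """Return the indices of positional arguments marked trainable."""
    argnum = []
    for idx, arg in enumerate(args):
        # Only arguments that explicitly set requires_grad=True count
        if getattr(arg, "requires_grad", False):
            argnum.append(idx)
    return argnum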
Example #8
# Assumed module-level imports for this snippet (not shown on this page):
import warnings

from autograd import jacobian as _jacobian
from autograd.numpy.numpy_boxes import ArrayBox
from pennylane import numpy as np  # assumed source of `np`; may differ by version

def jacobian(func, argnum=None):
    """Returns the Jacobian as a callable function of vector-valued
    (functions of) QNodes.

    This is a wrapper around the :func:`autograd.jacobian` function.

    Args:
        func (function): A vector-valued Python function or QNode that contains
            a combination of quantum and classical nodes. The output of the computation
            must consist of a single NumPy array (if classical) or a tuple of
            expectation values (if a quantum node)
        argnum (int or Sequence[int]): Which argument to take the gradient
            with respect to. If a sequence is given, the Jacobian matrix
            corresponding to all input elements and all output elements is returned.

    Returns:
        function: the function that returns the Jacobian of the input
        function with respect to the arguments in argnum
    """
    # pylint: disable=no-value-for-parameter

    if argnum is not None:
        # for backwards compatibility with existing code
        # that manually specifies argnum
        if isinstance(argnum, int):
            return _jacobian(func, argnum)

        return lambda *args, **kwargs: np.stack(
            [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]).T

    def _jacobian_function(*args, **kwargs):
        """Inspect the arguments for differentiability, and
        compute the autograd gradient function with required argnums
        dynamically.

        This wrapper function is returned to the user instead of autograd.jacobian,
        so that we can take into account cases where the user computes the
        jacobian function once, but then calls it with arguments that change
        in differentiability.
        """
        argnum = []

        for idx, arg in enumerate(args):

            trainable = getattr(arg, "requires_grad", None)
            array_box = isinstance(arg, ArrayBox)

            if trainable is None and not array_box:

                warnings.warn(
                    "Starting with PennyLane v0.20.0, when using Autograd, inputs "
                    "have to explicitly specify requires_grad=True (or the "
                    "argnum argument must be passed) in order for trainable parameters to be "
                    "identified.",
                    UserWarning,
                )

            if trainable is None:
                trainable = True

            if trainable:
                argnum.append(idx)

        if not argnum:
            return tuple()

        if len(argnum) == 1:
            return _jacobian(func, argnum[0])(*args, **kwargs)

        jacobians = [_jacobian(func, arg)(*args, **kwargs) for arg in argnum]

        try:
            return np.stack(jacobians).T
        except ValueError:
            # The Jacobian of each argument is a different shape and cannot
            # be stacked; simply return the tuple of argument Jacobians.
            return tuple(jacobians)

    return _jacobian_function
Example #9
from autograd import jacobian as _jacobian  # assumed import, not shown on this page

def jacobian(func):
    """Wrap autograd's jacobian function."""
    return _jacobian(func)