Example no. 1
0
    def test_single_step(self, qnode, param, nums_frequency, spectra,
                         substep_optimizer, substep_kwargs):
        """Check that a single Rotosolve step on a QNode is consistent:
        ``step`` and ``step_and_cost`` propose the same new parameters, and
        ``step_and_cost`` reports the cost at the *previous* parameters."""
        param = tuple(np.array(p, requires_grad=True) for p in param)
        opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

        # A single trainable argument is returned unpacked by the optimizer;
        # repack it so the comparisons below treat both cases uniformly.
        single_arg = len(param) == 1

        new_param_step = opt.step(
            qnode, *param, nums_frequency=nums_frequency, spectra=spectra
        )
        if single_arg:
            new_param_step = (new_param_step,)

        both_scalar = np.isscalar(new_param_step) and np.isscalar(param)
        assert both_scalar or len(new_param_step) == len(param)

        new_param_step_and_cost, old_cost = opt.step_and_cost(
            qnode, *param, nums_frequency=nums_frequency, spectra=spectra
        )
        if single_arg:
            new_param_step_and_cost = (new_param_step_and_cost,)

        # Both entry points must yield the same updated parameters.
        flat_step_and_cost = np.fromiter(_flatten(new_param_step_and_cost), dtype=float)
        flat_step = np.fromiter(_flatten(new_param_step), dtype=float)
        assert np.allclose(flat_step_and_cost, flat_step)
        # The returned cost corresponds to the parameters before the step.
        assert np.isclose(qnode(*param), old_cost)
Example no. 2
0
    def test_single_step(self, qnode, param, num_freq, optimizer, optimizer_kwargs):
        """One optimizer step on a QNode: ``step`` and ``step_and_cost`` must
        agree, and the returned cost equals the cost at the old parameters."""
        opt = RotosolveOptimizer()

        # A length-one parameter tuple is returned unpacked; repack it below.
        single_arg = len(param) == 1
        step_kwargs = {
            "num_freqs": num_freq,
            "optimizer": optimizer,
            "optimizer_kwargs": optimizer_kwargs,
        }

        new_param_step = opt.step(qnode, *param, **step_kwargs)
        if single_arg:
            new_param_step = (new_param_step,)

        assert (np.isscalar(new_param_step) and np.isscalar(param)) or (
            len(new_param_step) == len(param)
        )

        new_param_step_and_cost, old_cost = opt.step_and_cost(qnode, *param, **step_kwargs)
        if single_arg:
            new_param_step_and_cost = (new_param_step_and_cost,)

        # Both entry points must yield the same updated parameters.
        assert np.allclose(
            np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
        )
        # The returned cost corresponds to the parameters before the step.
        assert np.isclose(qnode(*param), old_cost)
Example no. 3
0
    def test_with_qnode(self, qnode, params, ids, nums_frequency, spectra,
                        shifts, exp_calls, mocker):
        """Run a full reconstruction on a QNode.

        For every reconstructed univariate restriction ``rec``, checks that it
        matches the QNode at the reconstruction point and matches an
        explicitly built univariate version of the QNode away from it.
        """
        qnode = qml.QNode(qnode, dev_1)

        with qml.Tracker(qnode.device) as tracker:
            recons = reconstruct(qnode, ids, nums_frequency, spectra,
                                 shifts)(*params)
        # Reconstruction must use exactly the predicted number of executions.
        assert tracker.totals["executions"] == exp_calls
        arg_names = list(signature(qnode.func).parameters.keys())
        for outer_key in recons:
            outer_key_num = arg_names.index(outer_key)
            for inner_key, rec in recons[outer_key].items():
                x0 = params[outer_key_num]
                if not pnp.isscalar(x0):
                    # Array-valued argument: x0 becomes the single varied entry
                    # and shift_vec a one-hot vector selecting that entry.
                    x0 = x0[inner_key]
                    shift_vec = qml.math.zeros_like(params[outer_key_num])
                    shift_vec[inner_key] = 1.0
                # For a scalar argument the "selector" is simply 1.0 (the
                # branch above did not run, so shift_vec is defined only here).
                shift_vec = 1.0 if pnp.isscalar(
                    params[outer_key_num]) else shift_vec
                # mask keeps all entries of the argument fixed except the one
                # selected by shift_vec.
                mask = (0.0 if pnp.isscalar(params[outer_key_num]) else
                        pnp.ones(qml.math.shape(params[outer_key_num])) -
                        shift_vec)
                # Restriction of the QNode to the single selected entry.
                univariate = lambda x: qnode(
                    *params[:outer_key_num],
                    params[outer_key_num] * mask + x * shift_vec,
                    *params[outer_key_num + 1:],
                )
                assert np.isclose(rec(x0), qnode(*params))
                assert np.isclose(rec(x0 + 0.1), univariate(x0 + 0.1))
                # Compare the two functions at several sampled points.
                assert fun_close(rec, univariate, 10)
Example no. 4
0
def expand_num_freq(num_freq, param):
    """Broadcast ``num_freq`` to match the structure of ``param``.

    Args:
        num_freq (int or list): number(s) of frequencies; a scalar applies to
            all parameters, a per-parameter scalar is broadcast over
            array-valued parameters, and a per-parameter sequence must match
            the parameter's length.
        param (list): parameters (scalars and/or arrays) to expand against.

    Returns:
        list: per-parameter frequency specification with the same structure
        as ``param``.

    Raises:
        ValueError: if a sequence of frequencies is given for a scalar
            parameter, or if a frequency sequence's length does not match the
            corresponding parameter's length.
    """
    if np.isscalar(num_freq):
        # One global scalar: replicate it once per parameter.
        num_freq = [num_freq] * len(param)

    expanded = []
    for freq, par in zip(num_freq, param):
        if np.isscalar(freq):
            # Scalar frequency: keep as-is for a scalar parameter, broadcast
            # to the parameter's shape otherwise.
            expanded.append(freq if np.isscalar(par) else np.ones_like(par) * freq)
        elif np.isscalar(par):
            raise ValueError(
                f"Received a sequence of frequencies {freq} for the scalar parameter {par}."
            )
        elif len(freq) == len(par):
            expanded.append(freq)
        else:
            raise ValueError(
                f"Length mismatch: {len(freq)} frequencies given for a parameter "
                f"of length {len(par)}."
            )
    return expanded
Example no. 5
0
 def _jacobian(*args, **kwargs):  # pylint: disable=unused-argument
     """Return the Jacobian of the classical preprocessing w.r.t. ``args``,
     restricted to ``argnum`` (a single index or a sequence of indices) when
     it is given."""
     full_jac = torch.autograd.functional.jacobian(classical_preprocessing, args)
     if argnum is None:
         return full_jac
     if np.isscalar(argnum):
         return full_jac[argnum]
     return tuple(full_jac[idx] for idx in argnum)
Example no. 6
0
        def _jacobian(*args, **kwargs):  # pylint: disable=unused-argument
            """Return the Jacobian of the classical preprocessing w.r.t.
            ``args``, restricted to ``argnum`` or, when that is ``None``, to
            the trainable argument indices."""
            full_jac = torch.autograd.functional.jacobian(
                classical_preprocessing, args
            )

            # Fall back to the trainable indices when no argnum was provided.
            if argnum is not None:
                selection = argnum
            else:
                selection = qml.math.get_trainable_indices(args)

            if np.isscalar(selection):
                return full_jac[selection]
            return tuple(full_jac[idx] for idx in selection)
Example no. 7
0
 def _jacobian(*args, **kwargs):
     """Return the autograd Jacobian of the classical preprocessing,
     restricted to ``argnum`` (a single index or a sequence) when given."""
     if argnum is None:
         return qml.jacobian(classical_preprocessing)(*args, **kwargs)
     if np.isscalar(argnum):
         return qml.jacobian(classical_preprocessing, argnum=argnum)(*args, **kwargs)
     # Sequence of indices: one Jacobian per requested argument.
     return tuple(
         qml.jacobian(classical_preprocessing, argnum=i)(*args, **kwargs)
         for i in argnum
     )
Example no. 8
0
        def _jacobian(*args, **kwargs):
            """Return the JAX Jacobian of the classical preprocessing.

            NOTE(review): when ``trainable_only`` is set, ``argnum`` is
            assumed to be a scalar or a sequence, never ``None`` — confirm
            with the callers.
            """
            if not trainable_only:
                return jax.jacobian(classical_preprocessing,
                                    argnums=argnum)(*args, **kwargs)

            # Differentiate w.r.t. every argument, then select the requested
            # entries from the full Jacobian.
            all_indices = list(range(len(args)))
            full_jac = jax.jacobian(classical_preprocessing,
                                    argnums=all_indices)(*args, **kwargs)
            if np.isscalar(argnum):
                return full_jac[argnum]
            return tuple(full_jac[i] for i in argnum)
Example no. 9
0
    def test_differentiability_jax(self, qnode, params, ids, nums_frequency,
                                   spectra, shifts, exp_calls, mocker):
        """Tests the reconstruction and differentiability with JAX.

        For every reconstructed univariate restriction ``rec`` of the QNode,
        checks that ``jax.grad(rec)`` matches the QNode gradient at the
        reconstruction point and the gradient of an explicitly built
        univariate function away from it.
        """
        jax = pytest.importorskip("jax")
        from jax.config import config

        # The reconstruction comparisons require double precision.
        config.update("jax_enable_x64", True)
        params = tuple(jax.numpy.array(par) for par in params)
        qnode = qml.QNode(qnode, dev_1, interface="jax")
        with qml.Tracker(qnode.device) as tracker:
            recons = reconstruct(qnode, ids, nums_frequency, spectra,
                                 shifts)(*params)
        # Reconstruction must use exactly the predicted number of executions.
        assert tracker.totals["executions"] == exp_calls
        arg_names = list(signature(qnode.func).parameters.keys())
        for outer_key in recons:
            outer_key_num = arg_names.index(outer_key)
            for inner_key, rec in recons[outer_key].items():
                x0 = params[outer_key_num]
                if not pnp.isscalar(x0):
                    # Array-valued argument: x0 becomes the single varied
                    # entry and shift_vec a one-hot selector for that entry.
                    # scatter_element_add is used because JAX arrays do not
                    # support in-place item assignment.
                    x0 = x0[inner_key]
                    shift_vec = qml.math.zeros_like(params[outer_key_num])
                    shift_vec = qml.math.scatter_element_add(
                        shift_vec, inner_key, 1.0)
                # For a scalar argument the "selector" is simply 1.0.
                shift_vec = 1.0 if pnp.isscalar(
                    params[outer_key_num]) else shift_vec
                # mask keeps every entry fixed except the selected one.
                mask = (0.0 if pnp.isscalar(params[outer_key_num]) else
                        pnp.ones(qml.math.shape(params[outer_key_num])) -
                        shift_vec)
                # Restriction of the QNode to the single selected entry.
                univariate = lambda x: qnode(
                    *params[:outer_key_num],
                    params[outer_key_num] * mask + x * shift_vec,
                    *params[outer_key_num + 1:],
                )
                exp_qnode_grad = jax.grad(qnode, argnums=outer_key_num)
                exp_grad = jax.grad(univariate)
                grad = jax.grad(rec)
                assert np.isclose(grad(x0), exp_qnode_grad(*params)[inner_key])
                assert np.isclose(grad(x0 + 0.1), exp_grad(x0 + 0.1))
                # Compare the gradient functions at several sampled points.
                assert fun_close(grad, exp_grad, 10)
Example no. 10
0
        def _jacobian(*args, **kwargs):
            """Return the TensorFlow Jacobian of the classical preprocessing
            w.r.t. the arguments selected by ``argnum`` (all of them when it
            is ``None``)."""
            # np.isscalar(None) is False, so the three branches are mutually
            # exclusive regardless of ordering.
            if argnum is None:
                sources = args
            elif np.isscalar(argnum):
                sources = args[argnum]
            else:
                sources = tuple(args[i] for i in argnum)

            with tf.GradientTape() as tape:
                gate_params = classical_preprocessing(*args, **kwargs)

            return tape.jacobian(gate_params, sources)
Example no. 11
0
 def test_differentiability_autograd(self, qnode, params, ids,
                                     nums_frequency, spectra, shifts,
                                     exp_calls, mocker):
     """Tests the reconstruction and differentiability with autograd.

     For every reconstructed univariate restriction ``rec``, checks that
     ``qml.grad(rec)`` matches the QNode gradient at the reconstruction
     point and the gradient of an explicitly built univariate function
     away from it.
     """
     qnode = qml.QNode(qnode, dev_1, interface="autograd")
     with qml.Tracker(qnode.device) as tracker:
         recons = reconstruct(qnode, ids, nums_frequency, spectra,
                              shifts)(*params)
     # Reconstruction must use exactly the predicted number of executions.
     assert tracker.totals["executions"] == exp_calls
     arg_names = list(signature(qnode.func).parameters.keys())
     for outer_key in recons:
         outer_key_num = arg_names.index(outer_key)
         for inner_key, rec in recons[outer_key].items():
             x0 = params[outer_key_num]
             if not pnp.isscalar(x0):
                 # Array-valued argument: x0 becomes the single varied entry
                 # and shift_vec a one-hot selector for that entry.
                 x0 = x0[inner_key]
                 shift_vec = qml.math.zeros_like(params[outer_key_num])
                 shift_vec[inner_key] = 1.0
             # For a scalar argument the "selector" is simply 1.0.
             shift_vec = 1.0 if pnp.isscalar(
                 params[outer_key_num]) else shift_vec
             # mask keeps every entry fixed except the selected one.
             mask = (0.0 if pnp.isscalar(params[outer_key_num]) else
                     pnp.ones(qml.math.shape(params[outer_key_num])) -
                     shift_vec)
             # Restriction of the QNode to the single selected entry.
             univariate = lambda x: qnode(
                 *params[:outer_key_num],
                 params[outer_key_num] * mask + x * shift_vec,
                 *params[outer_key_num + 1:],
             )
             exp_qnode_grad = qml.grad(qnode, argnum=outer_key_num)
             exp_grad = qml.grad(univariate)
             grad = qml.grad(rec)
             # NOTE(review): the guard below runs the check when
             # nums_frequency is None, yet the comment labels exactly that
             # case as unsupported — confirm whether the condition or the
             # comment is inverted.
             if nums_frequency is None:
                 # Gradient evaluation at reconstruction point not supported for
                 # Dirichlet reconstruction
                 assert np.isclose(grad(x0),
                                   exp_qnode_grad(*params)[inner_key])
             assert np.isclose(grad(x0 + 0.1), exp_grad(x0 + 0.1))
             # Compare the gradient functions at several sampled points.
             assert fun_close(grad, exp_grad, 10)
Example no. 12
0
def test_multiple_steps(fun, x_min, param, num_freq):
    """Tests that repeated steps execute as expected, driving the
    parameters towards ``x_min``."""
    opt = RotosolveOptimizer()

    for _ in range(3):
        param = opt.step(
            fun,
            *param,
            num_freqs=num_freq,
            optimizer="brute",
            optimizer_kwargs=None,
        )
        # ``step`` unpacks length-1 parameter tuples; repack so the next
        # iteration can splat the parameters again.
        if len(x_min) == 1:
            param = (param,)

    assert (np.isscalar(x_min) and np.isscalar(param)) or len(x_min) == len(param)
    flat_expected = np.fromiter(_flatten(x_min), dtype=float)
    flat_found = np.fromiter(_flatten(param), dtype=float)
    assert np.allclose(flat_expected, flat_found, atol=1e-5)
Example no. 13
0
def test_multiple_steps(fun, x_min, param, num_freq):
    """Tests that repeated steps execute as expected, driving the
    parameters towards ``x_min``."""
    param = tuple(np.array(p, requires_grad=True) for p in param)
    opt = RotosolveOptimizer("brute", None)

    for _ in range(3):
        param = opt.step(fun, *param, nums_frequency=num_freq)
        # ``step`` unpacks length-1 parameter tuples; repack so the next
        # iteration can splat the parameters again.
        if len(x_min) == 1:
            param = (param,)

    assert (np.isscalar(x_min) and np.isscalar(param)) or len(x_min) == len(param)
    flat_expected = np.fromiter(_flatten(x_min), dtype=float)
    flat_found = np.fromiter(_flatten(param), dtype=float)
    assert np.allclose(flat_expected, flat_found, atol=1e-5)