Example #1
    def test_single_step(self, qnode, param, num_freq, optimizer, optimizer_kwargs):
        """Test executing a single step of the RotosolveOptimizer on a QNode."""
        opt = RotosolveOptimizer()

        repack_param = len(param) == 1
        new_param_step = opt.step(
            qnode,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        if repack_param:
            new_param_step = (new_param_step,)

        assert (np.isscalar(new_param_step) and np.isscalar(param)) or len(new_param_step) == len(
            param
        )
        new_param_step_and_cost, old_cost = opt.step_and_cost(
            qnode,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        if repack_param:
            new_param_step_and_cost = (new_param_step_and_cost,)

        assert np.allclose(
            np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
        )
        assert np.isclose(qnode(*param), old_cost)
Example #2
    def test_single_step(self, qnode, param, nums_frequency, spectra,
                         substep_optimizer, substep_kwargs):
        """Test executing a single step of the RotosolveOptimizer on a QNode."""
        param = tuple(np.array(p, requires_grad=True) for p in param)
        opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

        repack_param = len(param) == 1
        new_param_step = opt.step(
            qnode,
            *param,
            nums_frequency=nums_frequency,
            spectra=spectra,
        )
        if repack_param:
            new_param_step = (new_param_step,)

        assert (np.isscalar(new_param_step)
                and np.isscalar(param)) or len(new_param_step) == len(param)
        new_param_step_and_cost, old_cost = opt.step_and_cost(
            qnode,
            *param,
            nums_frequency=nums_frequency,
            spectra=spectra,
        )
        if repack_param:
            new_param_step_and_cost = (new_param_step_and_cost,)

        assert np.allclose(
            np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
        )
        assert np.isclose(qnode(*param), old_cost)
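For context, the nums_frequency and spectra arguments above are per-argument dictionaries mapping parameter indices to frequency data. A minimal sketch of their shape for a QNode with a single scalar argument named "x" (the names and values here are illustrative assumptions, not taken from the test fixtures):

# Hypothetical metadata for a scalar argument "x" entering one Pauli rotation:
nums_frequency = {"x": {(): 1}}      # argument name -> parameter index -> number of frequencies
spectra = {"x": {(): [0.0, 1.0]}}    # argument name -> parameter index -> frequency spectrum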
Example #3
    def test_single_step_convergence(self, fun, x_min, param, nums_freq,
                                     exp_num_calls, substep_optimizer,
                                     substep_kwargs):
        """Tests convergence for easy classical functions in a single Rotosolve step.
        Includes testing of the parameter output shape and the old cost when using step_and_cost."""
        opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

        # Make only the first argument trainable
        param = (np.array(param[0], requires_grad=True),) + param[1:]
        # Only one argument is trainable, so all other arguments must stay fixed
        new_param_step = opt.step(
            fun,
            *param,
            nums_frequency=nums_freq,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step = (new_param_step,)

        assert all(
            np.allclose(p, new_p)
            for p, new_p in zip(param[1:], new_param_step[1:]))

        # With trainable parameters, training should happen
        param = tuple(np.array(p, requires_grad=True) for p in param)
        new_param_step = opt.step(
            fun,
            *param,
            nums_frequency=nums_freq,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step = (new_param_step,)

        assert len(x_min) == len(new_param_step)
        assert np.allclose(
            np.fromiter(_flatten(x_min), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
            atol=1e-5,
        )

        # Now with step_and_cost and trainable params
        new_param_step_and_cost, old_cost = opt.step_and_cost(
            fun,
            *param,
            nums_frequency=nums_freq,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step_and_cost = (new_param_step_and_cost,)

        assert len(x_min) == len(new_param_step_and_cost)
        assert np.allclose(
            np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
            atol=1e-5,
        )
        assert np.isclose(old_cost, fun(*param))
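The trainability switch exercised above comes from PennyLane's NumPy wrapper: only arguments created with requires_grad=True are updated by the optimizer, which is what the first assertion checks. A minimal sketch of the flag (values are illustrative):

from pennylane import numpy as np

x = np.array(0.3, requires_grad=True)   # updated by opt.step
y = np.array(0.2, requires_grad=False)  # left fixed by opt.step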
Example #4
def successive_params(par1, par2):
    """Return a list of parameter configurations, successively walking from
    par1 to par2 coordinate-wise."""
    par1_flat = np.fromiter(_flatten(par1), dtype=float)
    par2_flat = np.fromiter(_flatten(par2), dtype=float)
    walking_param = []
    for i in range(len(par1_flat) + 1):
        walking_param.append(unflatten(np.append(par2_flat[:i], par1_flat[i:]), par1))
    return walking_param
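A worked example of the walk this helper produces, computed directly from the definition above (run in the same module context, since it uses successive_params):

# successive_params((0.0, 0.0), (1.0, 2.0)) yields three configurations:
#   step 0: (0.0, 0.0)   all entries still from par1
#   step 1: (1.0, 0.0)   first entry replaced by par2
#   step 2: (1.0, 2.0)   both entries replaced by par2
walk = successive_params((0.0, 0.0), (1.0, 2.0))
assert len(walk) == 3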
Example #5
    def test_number_of_function_calls(
        self, fun, x_min, param, num_freq, optimizer, optimizer_kwargs
    ):
        """Tests that per parameter 2R+1 function calls are used for an update step."""
        global num_calls
        num_calls = 0

        def _fun(*args, **kwargs):
            global num_calls
            num_calls += 1
            return fun(*args, **kwargs)

        opt = RotosolveOptimizer()
        new_param = opt.step(
            _fun,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )

        expected_num_calls = np.sum(
            np.fromiter(_flatten(expand_num_freq(num_freq, param)), dtype=int) * 2 + 1
        )
        assert num_calls == expected_num_calls
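The 2R+1 count is a reconstruction argument: restricted to one parameter with R integer frequencies, the cost is a trigonometric polynomial with 2R+1 real degrees of freedom (one constant plus R sine and R cosine coefficients), so 2R+1 evaluations determine it exactly. A small counting sketch:

import numpy as np

# Three parameters with frequency counts R = (1, 2, 3) need
# (2*1 + 1) + (2*2 + 1) + (2*3 + 1) = 3 + 5 + 7 = 15 evaluations per step.
R = np.array([1, 2, 3])
assert np.sum(2 * R + 1) == 15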
Example #6
    def step(self, objective_fn, x, generators):
        r"""Update x with one step of the optimizer.

        Args:
            objective_fn (function): The objective function for optimization. It must have the
                signature ``objective_fn(x, generators=None)`` with a sequence of the values ``x``
                and a list of the gates ``generators`` as inputs, returning a single value.
            x (Union[Sequence[float], float]): Sequence containing the initial values of the
                variables to be optimized over, or a single float with the initial value.
            generators (list[~.Operation]): List containing the initial ``pennylane.ops.qubit``
                operators to be used in the circuit and optimized over.

        Returns:
            tuple[array, list[~.Operation]]: The new variable values :math:`x^{(t+1)}` and
            the new generators.
        """
        x_flat = np.fromiter(_flatten(x), dtype=float)

        if len(x_flat) != len(generators):
            raise ValueError(
                "Number of parameters ({}) must be equal to the number of "
                "generators ({}).".format(len(x_flat), len(generators))
            )

        for d, _ in enumerate(x_flat):
            x_flat[d], generators[d] = self._find_optimal_generators(
                objective_fn, x_flat, generators, d
            )

        return unflatten(x_flat, x), generators
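This signature matches PennyLane's RotoselectOptimizer, which optimizes the gate choices alongside the parameters. A minimal usage sketch under that assumption (device, circuit, and values are illustrative):

import pennylane as qml

dev = qml.device("default.qubit", wires=2)

@qml.qnode(dev)
def cost(x, generators=None):
    # Each generator is a rotation class (e.g. qml.RX) chosen by the optimizer.
    generators[0](x[0], wires=0)
    generators[1](x[1], wires=1)
    qml.CNOT(wires=[0, 1])
    return qml.expval(qml.PauliZ(0))

opt = qml.RotoselectOptimizer()
x, generators = opt.step(cost, [0.3, 0.7], [qml.RX, qml.RY])  # new values and gate choices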
Example #7
    def test_single_step_convergence(
        self, fun, x_min, param, num_freq, optimizer, optimizer_kwargs
    ):
        """Tests convergence for easy classical functions in a single Rotosolve step.
        Includes testing of the parameter output shape and the old cost when using step_and_cost."""
        opt = RotosolveOptimizer()

        new_param_step = opt.step(
            fun,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step = (new_param_step,)

        assert len(x_min) == len(new_param_step)
        assert np.allclose(
            np.fromiter(_flatten(x_min), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
            atol=1e-5,
        )

        new_param_step_and_cost, old_cost = opt.step_and_cost(
            fun,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step_and_cost = (new_param_step_and_cost,)

        assert len(x_min) == len(new_param_step_and_cost)
        assert np.allclose(
            np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
            atol=1e-5,
        )
        assert np.isclose(old_cost, fun(*param))
Example #8
    def test_single_step(self, fun, x_min, param, num_freq):
        """Tests convergence for easy classical functions in a single Rotosolve step
        with some arguments deactivated for training.
        Includes testing of the parameter output shape and the old cost when using step_and_cost."""
        substep_optimizer = "brute"
        substep_kwargs = None
        opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

        new_param_step = opt.step(
            fun,
            *param,
            nums_frequency=num_freq,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step = (new_param_step,)

        assert len(x_min) == len(new_param_step)
        assert np.allclose(
            np.fromiter(_flatten(x_min), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
            atol=1e-5,
        )

        new_param_step_and_cost, old_cost = opt.step_and_cost(
            fun,
            *param,
            nums_frequency=num_freq,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(param) == 1:
            new_param_step_and_cost = (new_param_step_and_cost,)

        assert len(x_min) == len(new_param_step_and_cost)
        assert np.allclose(
            np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
            np.fromiter(_flatten(new_param_step), dtype=float),
            atol=1e-5,
        )
        assert np.isclose(old_cost, fun(*param))
Example #9
def test_multiple_steps(fun, x_min, param, num_freq):
    """Tests that repeated steps execute as expected."""
    opt = RotosolveOptimizer()

    optimizer = "brute"
    optimizer_kwargs = None
    for _ in range(3):
        param = opt.step(
            fun,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(x_min) == 1:
            param = (param,)

    assert (np.isscalar(x_min) and np.isscalar(param)) or len(x_min) == len(param)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(param), dtype=float),
        atol=1e-5,
    )
Example #10
def test_multiple_steps(fun, x_min, param, num_freq):
    """Tests that repeated steps execute as expected."""
    param = tuple(np.array(p, requires_grad=True) for p in param)
    substep_optimizer = "brute"
    substep_kwargs = None
    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

    for _ in range(3):
        param = opt.step(
            fun,
            *param,
            nums_frequency=num_freq,
        )
        # The following accounts for the unpacking functionality for length-one param
        if len(x_min) == 1:
            param = (param,)

    assert (np.isscalar(x_min)
            and np.isscalar(param)) or len(x_min) == len(param)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(param), dtype=float),
        atol=1e-5,
    )
Example #11
    def step(self, objective_fn, x):
        r"""Update x with one step of the optimizer.

        Args:
            objective_fn (function): The objective function for optimization. It should take a
                sequence of the values ``x`` as input and return a single value.
            x (Union[Sequence[float], float]): Sequence containing the initial values of the
                variables to be optimized over, or a single float with the initial value.

        Returns:
            array: The new variable values :math:`x^{(t+1)}`.
        """
        x_flat = np.fromiter(_flatten(x), dtype=float)

        for d, _ in enumerate(x_flat):
            x_flat = self._rotosolve(objective_fn, x_flat, d)

        return unflatten(x_flat, x)
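Each coordinate update relies on the closed-form minimizer of a single-frequency sinusoid: if the cost restricted to one coordinate is f(theta) = a*sin(theta + phi) + c, three evaluations determine the argmin exactly. A sketch of that standard Rotosolve formula (an illustration, not the private _rotosolve itself):

import numpy as np

def rotosolve_argmin(f):
    """Closed-form argmin of f(theta) = a*sin(theta + phi) + c
    from three evaluations."""
    f0, f_plus, f_minus = f(0.0), f(np.pi / 2), f(-np.pi / 2)
    return -np.pi / 2 - np.arctan2(2 * f0 - f_plus - f_minus, f_plus - f_minus)

# Example: sin(theta + 0.4) is minimized at theta = -pi/2 - 0.4
theta_star = rotosolve_argmin(lambda t: np.sin(t + 0.4))
assert np.isclose(theta_star, -np.pi / 2 - 0.4)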
Example #12
            def gradient_product(g):
                """Vector-Jacobian product operator.

                Args:
                    g (array[float]): scalar or vector multiplying the Jacobian
                        from the left (output side)

                Returns:
                    nested Sequence[float]: vector-Jacobian product, arranged
                    into the nested structure of the input arguments in ``args``
                """
                diff_indices = None
                non_diff_indices = set()

                for arg, arg_variable in zip(args, self.arg_vars):
                    if not getattr(arg, "requires_grad", True):
                        indices = [i.idx for i in _flatten(arg_variable)]
                        non_diff_indices.update(indices)

                if non_diff_indices:
                    diff_indices = set(range(self.num_variables)) - non_diff_indices

                # Jacobian matrix of the circuit
                jac = self.jacobian(args, kwargs, wrt=diff_indices)

                if not g.shape:
                    vjp = g * jac  # numpy treats 0d arrays as scalars, hence @ cannot be used
                else:
                    vjp = g @ jac

                if non_diff_indices:
                    # Autograd requires we return a gradient of size (num_variables,)
                    res = zeros([self.num_variables])
                    indices = fromiter(diff_indices, dtype=int64)
                    res[indices] = vjp
                    vjp = res

                # Restore the nested structure of the input args.
                vjp = unflatten(vjp.flat, args)
                return vjp
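The contraction above is the usual vector-Jacobian product: for an output cotangent g and Jacobian J, the VJP is g @ J, with one entry per input variable. A tiny numeric illustration:

import numpy as np

jac = np.array([[1.0, 2.0, 3.0],
                [4.0, 5.0, 6.0]])  # 2 outputs, 3 variables
g = np.array([0.5, -1.0])          # cotangent on the outputs
vjp = g @ jac
assert np.allclose(vjp, [-3.5, -4.0, -4.5])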