Example #1
import numpy as np

from qibo import set_backend
from qibo.optimizers import optimize

# cost_function, set_parametrized_circuits and compute_chsh are helper
# functions defined alongside this example.


def main(nshots, backend):
    '''Variationally find a maximally entangled state and the correct
       measurement angle for the violation of Bell inequalities.

    Args:
        nshots (int): number of shots to use for the minimization.
        backend (str): backend to run the example on.
    '''
    set_backend(backend)
    initial_parameters = np.random.uniform(0, 2 * np.pi, 2)
    circuits = set_parametrized_circuits()
    best, params, _ = optimize(cost_function,
                               initial_parameters,
                               args=(circuits, nshots))
    print(f'Cost: {best}\n')
    print(f'Parameters: {params}\n')
    print(f'Angles for the RY gates: {(params*180/np.pi)%360} in degrees.\n')
    frequencies = []
    for circuit in circuits:
        circuit.set_parameters(params)
        frequencies.append(circuit(nshots=nshots).frequencies())
    chsh = compute_chsh(frequencies, nshots)
    print(f'CHSH inequality value: {chsh}\n')
    print(f'Target: {np.sqrt(2)*2}\n')
    print(
        f'Relative distance: {100*np.abs(np.abs(chsh)-np.sqrt(2)*2)/(np.sqrt(2)*2)}%\n'
    )
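The helpers called by ``main`` (``set_parametrized_circuits``, ``cost_function`` and ``compute_chsh``) are defined elsewhere in the example. As a rough illustration of the interfaces ``main`` relies on, the minimal sketch below assumes that ``set_parametrized_circuits`` returns four two-qubit measurement circuits sharing the two variational angles; the exact circuits and the CHSH sign convention in the shipped example may differ.

import numpy as np


def compute_chsh(frequencies, nshots):
    """Estimate the CHSH value from the measured bitstring counts.

    Each entry of ``frequencies`` holds the counts for one of the four
    measurement-setting combinations; the correlator of each setting is
    (N_even - N_odd) / nshots and one correlator enters with a minus sign.
    """
    signs = (1, 1, 1, -1)  # sign convention assumed for this sketch
    chsh = 0.0
    for sign, freq in zip(signs, frequencies):
        correlator = sum((-1) ** outcome.count("1") * counts
                         for outcome, counts in freq.items())
        chsh += sign * correlator / nshots
    return chsh


def cost_function(params, circuits, nshots):
    """Loss that is minimal when |CHSH| approaches 2*sqrt(2)."""
    frequencies = []
    for circuit in circuits:
        circuit.set_parameters(params)
        frequencies.append(circuit(nshots=nshots).frequencies())
    return -np.abs(compute_chsh(frequencies, nshots))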
Example #2
    def minimize(self, initial_parameters, method="BFGS", options=None,
                 messages=False):
        """Optimize the free parameters of the scheduling function.

        Args:
            initial_parameters (np.ndarray): Initial guess for the variational
                parameters that are optimized.
                The last element of the given array should correspond to the
                guess for the total evolution time T.
            method (str): The desired minimization method.
                One of ``"cma"`` (CMA-ES evolutionary optimizer), ``"sgd"`` (gradient descent) or
                any of the methods supported by
                `scipy.optimize.minimize <https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html>`_.
            options (dict): Dictionary with options for the different optimizers.
            messages (bool): If ``True`` the loss evolution is shown during
                optimization.

        Returns:
            The best loss value, the optimized parameters and the extra
            output returned by the optimizer.
        """
        self.opt_messages = messages
        if method == "sgd":
            # gradient-based optimization can act directly on backend tensors
            loss = self._loss
        else:
            # scipy and cma optimizers expect the loss as a numpy scalar
            loss = lambda p, ae, h1, msg, hist: K.to_numpy(
                self._loss(p, ae, h1, msg, hist))

        args = (self, self.hamiltonian.h1, self.opt_messages, self.opt_history)
        result, parameters, extra = optimizers.optimize(
            loss, initial_parameters, args=args, method=method, options=options)
        if isinstance(parameters, K.tensor_types) and not len(parameters.shape): # pragma: no cover
            # some optimizers, like ``Powell``, return a number instead of a list
            parameters = [parameters]
        self.set_parameters(parameters)
        return result, parameters, extra
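As a usage illustration, the following sketch optimizes the scheduling of an adiabatic evolution. The Hamiltonians, the polynomial scheduling ``s(t, p)`` and the initial guess are assumptions made for this sketch; only the ``minimize`` call follows the signature documented above.

import numpy as np
from qibo import hamiltonians, models

# Evolve from a transverse-field Hamiltonian towards a TFIM Hamiltonian
# (choices made for illustration only).
h0 = hamiltonians.X(3)
h1 = hamiltonians.TFIM(3, h=1.0)

# Parametrized scheduling with one free coefficient p; it satisfies
# s(0) = 0 and s(1) = 1 for any p.
s = lambda t, p: p * t + (1 - p) * t**2

evolution = models.AdiabaticEvolution(h0, h1, s, dt=1e-2)

# Initial guess: scheduling coefficient p and total evolution time T.
best, params, extra = evolution.minimize(np.array([0.5, 1.0]), method="BFGS",
                                         options={"maxiter": 100})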
Example #3
import numpy as np

from qibo import gates, models
from qibo.optimizers import optimize


def test_vqc(method, options, compile, filename):
    """Performs a variational quantum circuit minimization test.

    The ``method``, ``options``, ``compile`` and ``filename`` arguments are
    supplied externally (e.g. via pytest parametrization); ``filename``
    points to a regression fixture for the optimized parameters.
    """

    def myloss(parameters, circuit, target):
        # loss based on the overlap between the circuit state and the target
        circuit.set_parameters(parameters)
        state = circuit().tensor
        return 1 - np.abs(np.dot(np.conj(target), state))

    nqubits = 6
    nlayers = 4

    # Create variational circuit
    c = models.Circuit(nqubits)
    for l in range(nlayers):
        c.add((gates.RY(q, theta=0) for q in range(nqubits)))
        c.add((gates.CZ(q, q+1) for q in range(0, nqubits-1, 2)))
        c.add((gates.RY(q, theta=0) for q in range(nqubits)))
        c.add((gates.CZ(q, q+1) for q in range(1, nqubits-2, 2)))
        c.add(gates.CZ(0, nqubits-1))
    c.add((gates.RY(q, theta=0) for q in range(nqubits)))

    # Optimize starting from a random guess for the variational parameters
    np.random.seed(0)
    x0 = np.random.uniform(0, 2*np.pi, 2*nqubits*nlayers + nqubits)
    data = np.random.normal(0, 1, size=2**nqubits)

    # perform optimization
    best, params, _ = optimize(myloss, x0, args=(c, data), method=method,
                               options=options, compile=compile)
    if filename is not None:
        # ``utils`` is the local test helper module that stores and compares
        # regression fixtures.
        utils.assert_regression_fixture(params, filename)
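In the test suite these arguments would normally be injected by pytest parametrization; outside of pytest the function can also be called directly, for instance with the (hypothetical) values below.

# Hypothetical direct invocation; the parameter combinations exercised by
# the real test suite may differ.
test_vqc(method="Powell", options={"maxiter": 10}, compile=False, filename=None)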