def test_parallel_parametrized_circuit():
    """Evaluate circuit for multiple parameter sets, in serial and in parallel.

    Builds a layered RY/CZ ansatz, evaluates it for ten random parameter
    vectors sequentially, then via ``parallel_parametrized_execution`` with
    two processes, and checks both give the same states.
    """
    if 'GPU' in get_device():  # pragma: no cover
        pytest.skip("unsupported configuration")
    original_threads = get_threads()
    set_threads(1)
    try:
        nqubits = 5
        nlayers = 10
        c = Circuit(nqubits)
        for _ in range(nlayers):
            c.add((gates.RY(q, theta=0) for q in range(nqubits)))
            c.add((gates.CZ(q, q + 1) for q in range(0, nqubits - 1, 2)))
            c.add((gates.RY(q, theta=0) for q in range(nqubits)))
            c.add((gates.CZ(q, q + 1) for q in range(1, nqubits - 2, 2)))
            c.add(gates.CZ(0, nqubits - 1))
        c.add((gates.RY(q, theta=0) for q in range(nqubits)))
        size = len(c.get_parameters())
        np.random.seed(0)  # deterministic parameters for reproducibility
        parameters = [np.random.uniform(0, 2 * np.pi, size) for _ in range(10)]
        state = None
        # Reference results: sequential execution.
        r1 = []
        for params in parameters:
            c.set_parameters(params)
            r1.append(c(state))
        # Parallel execution must match the sequential reference.
        r2 = parallel_parametrized_execution(c,
                                             parameters=parameters,
                                             initial_state=state,
                                             processes=2)
        np.testing.assert_allclose(r1, r2)
    finally:
        # Restore the global thread count even if an assertion above fails,
        # so this test cannot affect subsequent tests.
        set_threads(original_threads)
def test_get_parameters():
    """Check the supported output formats of ``Circuit.get_parameters``.

    NOTE(review): this function is shadowed by the parametrized test of the
    same name defined later in the file, so pytest never collects it —
    consider removing or renaming one of the two.
    """
    c = Circuit(3)
    c.add(gates.RX(0, theta=0.123))
    c.add(gates.RY(1, theta=0.456))
    c.add(gates.CZ(1, 2))
    c.add(gates.fSim(0, 2, theta=0.789, phi=0.987))
    c.add(gates.H(2))
    unitary = np.array([[0.123, 0.123], [0.123, 0.123]])
    c.add(gates.Unitary(unitary, 1))
    # Default ("list") format. The original repeated this exact check twice;
    # the duplicate has been removed.
    params = [0.123, 0.456, (0.789, 0.987), unitary]
    assert params == c.get_parameters()
    # "dict" format maps each parametrized gate to its parameters.
    params = {
        c.queue[0]: 0.123,
        c.queue[1]: 0.456,
        c.queue[3]: (0.789, 0.987),
        c.queue[5]: unitary
    }
    assert params == c.get_parameters("dict")
    # "flatlist" flattens tuples and matrices into a single list of scalars.
    params = [0.123, 0.456, 0.789, 0.987]
    params.extend(unitary.ravel())
    assert params == c.get_parameters("flatlist")
    # Unknown format names raise ValueError.
    with pytest.raises(ValueError):
        c.get_parameters("test")
def test_get_parameters(trainable, include_not_trainable):
    """Check ``Circuit.get_parameters`` formats with (non-)trainable gates.

    Parameters of gates created with ``trainable=False`` must be excluded
    from every output format unless ``include_not_trainable`` is set.
    """
    c = Circuit(3)
    c.add(gates.RX(0, theta=0.123))
    c.add(gates.RY(1, theta=0.456, trainable=trainable))
    c.add(gates.CZ(1, 2))
    c.add(gates.fSim(0, 2, theta=0.789, phi=0.987, trainable=trainable))
    c.add(gates.H(2))
    unitary = np.array([[0.123, 0.123], [0.123, 0.123]])
    c.add(gates.Unitary(unitary, 1))
    if trainable or include_not_trainable:
        # All parametrized gates are reported.
        expected = {
            "list": [0.123, 0.456, (0.789, 0.987), unitary],
            "dict": {
                c.queue[0]: 0.123,
                c.queue[1]: 0.456,
                c.queue[3]: (0.789, 0.987),
                c.queue[5]: unitary,
            },
            "flatlist": [0.123, 0.456, 0.789, 0.987],
        }
    else:
        # The non-trainable RY and fSim parameters are dropped.
        expected = {
            "list": [0.123, unitary],
            "dict": {c.queue[0]: 0.123, c.queue[5]: unitary},
            "flatlist": [0.123],
        }
    # The flattened unitary entries always appear at the end of "flatlist".
    expected["flatlist"].extend(unitary.ravel())
    for output_format, target in expected.items():
        assert c.get_parameters(output_format, include_not_trainable) == target
    # Unknown format names raise ValueError.
    with pytest.raises(ValueError):
        c.get_parameters("test")