Example #1
import numpy
import numpy as np
import tequila as tq
from tequila import Variable, ExpectationValue, simulate, grad


def test_total_type_jumble(simulator,
                           value1=(numpy.random.randint(10, 1000) / 1000.0 * (numpy.pi / 2.0)),
                           value2=(numpy.random.randint(10, 1000) / 1000.0 * (numpy.pi / 2.0))):
    a = Variable('a')
    b = Variable('b')
    values = {a: value1, b: value2}
    H1 = tq.paulis.X(0)
    H2 = tq.paulis.Y(0)
    U1 = tq.gates.Ry(angle=a, target=0)
    U2 = tq.gates.Rx(angle=b, target=0)
    e1 = ExpectationValue(U1, H1)
    e2 = ExpectationValue(U2, H2)
    # stack the two expectation values into one vector-valued objective,
    # then multiply each component by the scalar objective a * e2
    stacked = tq.objective.vectorize([e1, e2])
    stacked = stacked * a * e2
    out = simulate(stacked, variables=values, backend=simulator)
    v1 = out[0]
    v2 = out[1]
    # analytic references: <X> after Ry(a) is sin(a), <Y> after Rx(b) is -sin(b)
    appendage = a(values) * -np.sin(b(values))
    an1 = np.sin(a(values)) * appendage
    an2 = -np.sin(b(values)) * appendage
    assert np.isclose(v1 + v2, an1 + an2)
    # this will not contract, so the gradient has to do some real work
    ga = grad(stacked, a)
    gb = grad(stacked, b)
    la = [tq.simulate(x, variables=values) for x in ga]
    print(la)
    lb = [tq.simulate(x, variables=values) for x in gb]
    print(lb)
    tota = np.sum(np.array(la))
    totb = np.sum(np.array(lb))
    gan1 = np.cos(a(values)) * appendage + (np.sin(a(values)) * -np.sin(b(values))) - (np.sin(b(values)) * -np.sin(b(values)))
    gan2 = np.sin(a(values)) * a(values) * -np.cos(b(values)) + 2 * (-np.cos(b(values)) * appendage)
    assert np.isclose(tota + totb, gan1 + gan2)
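The same building blocks can be exercised in isolation. Below is a minimal sketch, assuming a local tequila installation with its default simulator backend and an arbitrarily chosen variable value; the reference values in the comments follow the analytic expressions used in the test above.

import numpy as np
import tequila as tq

a = tq.Variable('a')
E = tq.ExpectationValue(U=tq.gates.Ry(angle=a, target=0), H=tq.paulis.X(0))
vec = tq.objective.vectorize([E, 2 * E])      # vector-valued objective with two components
vals = {a: 0.5}
print(tq.simulate(vec, variables=vals))       # roughly [sin(0.5), 2 * sin(0.5)]
dE = tq.grad(E, 'a')                          # analytic derivative objective w.r.t. a
print(tq.simulate(dE, variables=vals))        # roughly cos(0.5)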
Example #2
import numpy
import numpy as np
from tequila import Variable, ExpectationValue, simulate, grad, gates, paulis


def test_really_awfull_thing(simulator, value1=(numpy.random.randint(10, 1000) / 1000.0 * (numpy.pi / 2.0)),
                             value2=(numpy.random.randint(10, 1000) / 1000.0 * (numpy.pi / 2.0))):
    angle1 = Variable(name="angle1")
    angle2 = Variable(name="angle2")
    variables = {angle1: value1, angle2: value2}

    prod = angle1 * angle2
    qubit = 0
    control = None
    H = paulis.Y(qubit=qubit)
    U = gates.Rx(target=qubit, control=control, angle=prod)
    Up = gates.Rx(target=qubit, control=control, angle=prod + np.pi / 2)
    Down = gates.Rx(target=qubit, control=control, angle=prod - np.pi / 2)
    e1 = ExpectationValue(U=U, H=H)
    en1 = simulate(e1, variables=variables, backend=simulator)
    uen = simulate(0.5 * ExpectationValue(Up, H), variables=variables, backend=simulator)
    den = simulate(-0.5 * ExpectationValue(Down, H), variables=variables, backend=simulator)
    an1 = -np.sin(prod(variables=variables))
    anval = prod(variables=variables)
    an2 = angle2(variables=variables)
    added = angle1 * e1
    raised = added.wrap(np.sin)
    dO = grad(raised, 'angle1')
    dE = grad(e1, 'angle1')
    dA = grad(added, 'angle1')
    val = simulate(added, variables=variables, backend=simulator)
    dave = simulate(dA, variables=variables, backend=simulator)
    deval = simulate(dE, variables=variables, backend=simulator)
    doval = simulate(dO, variables=variables, backend=simulator)
    dtrue = np.cos(val) * dave
    assert np.isclose(en1, an1, atol=1.e-4)
    assert np.isclose(deval, an2 * (uen + den), atol=1.e-4)
    assert np.isclose(doval, dtrue, atol=1.e-4)
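The uen and den terms above are the two halves of the parameter-shift rule for a Pauli rotation, dE/dtheta = 0.5 * (E(theta + pi/2) - E(theta - pi/2)); the factor an2 = angle2 is the inner derivative of prod = angle1 * angle2. A compact sketch of the shift rule on its own, again assuming the default tequila backend and an arbitrary test angle:

import numpy as np
import tequila as tq

theta = tq.Variable("theta")
vals = {theta: 0.7}
H = tq.paulis.Y(0)
E = tq.ExpectationValue(U=tq.gates.Rx(target=0, angle=theta), H=H)
plus = tq.ExpectationValue(U=tq.gates.Rx(target=0, angle=theta + np.pi / 2), H=H)
minus = tq.ExpectationValue(U=tq.gates.Rx(target=0, angle=theta - np.pi / 2), H=H)
shifted = 0.5 * (tq.simulate(plus, variables=vals) - tq.simulate(minus, variables=vals))
analytic = tq.simulate(tq.grad(E, "theta"), variables=vals)
assert np.isclose(analytic, shifted)   # both roughly -cos(0.7) here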
Example #3
import numpy as np
from tequila import Variable, Objective, grad


def test_exotic_gradients(gradvar):
    # the 'a' and 'b' cases fail with autograd but pass with jax
    a = Variable('a')
    b = Variable('b')
    c = Variable('c')
    d = Variable('d')
    e = Variable('e')
    f = Variable('f')
    variables = {a: 2.0, b: 3.0, c: 4.0, d: 5.0, e: 6.0, f: 7.0}

    t = c * a**b + b / c - Objective(
        args=[c], transformation=np.cos) + f / (d * e) + a * Objective(
            args=[d], transformation=np.exp) / (f + b) + Objective(
                args=[e], transformation=np.tanh) + Objective(
                    args=[f], transformation=np.sinc)
    g = grad(t, gradvar)
    if gradvar == 'a':
        assert np.isclose(
            g(variables),
            c(variables) * b(variables) * (a(variables)**(b(variables) - 1.)) +
            np.exp(d(variables)) / (f(variables) + b(variables)))
    if gradvar == 'b':
        assert np.isclose(
            g(variables),
            (c(variables) * a(variables)**b(variables)) * np.log(a(variables))
            + 1. / c(variables) - a(variables) * np.exp(d(variables)) /
            (f(variables) + b(variables))**2.0)
    if gradvar == 'c':
        assert np.isclose(
            g(variables),
            a(variables)**b(variables) - b(variables) / c(variables)**2. +
            np.sin(c(variables)))
    if gradvar == 'd':
        assert np.isclose(
            g(variables),
            -f(variables) / (np.square(d(variables)) * e(variables)) +
            a(variables) * np.exp(d(variables)) /
            (f(variables) + b(variables)))
    if gradvar == 'e':
        assert np.isclose(
            g(variables), 2. / (1. + np.cosh(2 * e(variables))) -
            f(variables) / (d(variables) * e(variables)**2.))
    if gradvar == 'f':
        assert np.isclose(
            g(variables), 1. / (d(variables) * e(variables)) -
            a(variables) * np.exp(d(variables)) /
            (f(variables) + b(variables))**2. +
            np.cos(np.pi * f(variables)) / f(variables) -
            np.sin(np.pi * f(variables)) / (np.pi * f(variables)**2.))
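Note that t above contains no quantum expectation values at all; grad differentiates purely classical objectives built from Variables and numpy transformations just as well. A short sketch of that pattern, with an arbitrary variable name and value:

import numpy as np
from tequila import Variable, Objective, grad

x = Variable('x')
obj = x * Objective(args=[x], transformation=np.exp)   # represents f(x) = x * exp(x)
dobj = grad(obj, 'x')                                  # product rule gives (1 + x) * exp(x)
vals = {x: 1.3}
assert np.isclose(dobj(vals), (1.0 + 1.3) * np.exp(1.3))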
Example #4
import numpy as np


def f(x):
    return np.cos(x)**2. + np.sin(x)**2.
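By the Pythagorean identity, f returns 1.0 for every real input, which makes it a convenient transformation for sanity checks: wrapping any objective with it should yield a constant. A hedged sketch of that use, assuming the same tequila API as in the earlier examples:

import numpy as np
import tequila as tq

def f(x):
    return np.cos(x)**2. + np.sin(x)**2.

a = tq.Variable('a')
E = tq.ExpectationValue(U=tq.gates.Ry(angle=a, target=0), H=tq.paulis.X(0))
wrapped = E.wrap(f)                               # f is identically 1, so the result is constant
print(tq.simulate(wrapped, variables={a: 0.25}))  # roughly 1.0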