# Example 1
def diff_gibbs(rho, H):
    """Compare rho with the Gibbs state of H at the matching inverse temperature.

    Returns the site-wise comparison matrix from Rho.compare_all.
    """
    beta = Gibbs.rho2beta(H, rho)
    gibbs_state = Gibbs.beta2rho(H, beta)
    return Rho.compare_all(rho, gibbs_state)
# Example 2
def test_reverse_engineering(n, k, nit=50, tol=1e-4):
    '''Optimize H^2 with exact solution,
    and then displace H^2 or rho by local unitary

    Builds a rho that exactly minimizes tr(rho H^2), perturbs it with small
    random local unitaries, then checks the layer optimizer recovers the
    exact minimum.

    n   -- chain length (sites)
    k   -- entropy density; total entropy of the product state is n*k
    nit -- NOTE(review): unused; the loop below is hard-coded to 1000
           iterations (cf. test_small_chain_varE which uses nit) — confirm
    tol -- relative convergence/acceptance tolerance
    '''
    H = Hamilton_XZ(n)['H']
    H2 = H @ H
    w, v = la.eigh(H2)
    # Exact minimizer of tr(rho H^2): sort the product-state spectrum
    # descending so the largest weights pair with the smallest eigenvalues.
    rho = np.sort(np.diag(Rho.rho_prod_even(n, n * k)))[::-1]
    rho = v @ np.diag(rho) @ v.T.conj()
    mini = opt.min_expect(H2, rho)
    assert abs(trace2(rho, H2) - mini) < 1e-6
    # Displace the exact solution by small random local unitaries (amp=0.1).
    Y = LayersDense(rho, H2=H2, D=3)
    for ind in Y.indices:
        Y[ind] = rand_unitary([4, 4], amp=0.1)
    rho2 = Y.contract_rho()
    assert trace2(rho2, H2) > mini
    #print(trace2(rho2, H2).real, mini)
    # Re-optimize from the displaced state; stop when the relative
    # per-cycle improvement drops below tol/100.
    Y = LayersDense(rho2, H2=H2, D=3)
    last = np.inf
    for i in range(1000):
        l = minimizeVarE_cycle(Y)
        if last - l[-1] < tol * l[-1] / 100:
            break
        last = l[-1]
    print("Last", i, last, l)
    assert abs(last - mini) < 3 * tol * abs(max(
        mini, 1)), "Global minimum for local Hamiltonian not found"
# Example 3
def testConvergence(Hf, arg_tpl, rs=None):
    """Run the variance minimizer from several random initial states and
    print the resulting states in the eigenbasis of H to inspect convergence.

    NOTE(review): relies on module-level globals `n`, `delta`, `g`, `s`,
    `nit`, and `diag` that are not defined here; `arg_tpl` is unused —
    confirm whether it should feed Hf instead of the globals.
    """
    H = Hf(n, delta, g, rs)
    R = np.empty([nit, 2**n, 2**n], dtype='complex128')
    for i in range(nit):
        rho = rand_rotate(Rho.rho_prod_even(n, s), np.random)
        R[i] = minimize_var(H, rho, nit=10000)
    w, v = la.eigh(H)
    print(w)
    # Spectrum of the last iterate (rho still holds the final loop value).
    print(la.eigh(rho)[0])
    # NOTE(review): range starts at 1, so R[0] is never printed — off-by-one?
    for i in range(1, nit):
        rho = v.T.conj() @ R[i] @ v
        print(diag(rho).real)
# Example 4
def mean_diag(Hf, arg_tpl, _optimize, arg_opt, arg_clt):
    """Mean of the diagonal of the site-wise comparison between each
    optimized state and the beta=0 (infinite-temperature) Gibbs state.

    Loads previously saved data (see Collect) and returns a length-`nit`
    array with one mean per repetition, for the first entropy index only.

    Fix: removed the unused local `n = arg_tpl['n']` (dead assignment).
    """
    D = arg_opt['D']
    res = loadData(Hf,
                   arg_tpl,
                   _optimize,
                   arg_opt,
                   arg_clt,
                   pre="_D={}".format(D))
    H, R, nit, S = res['H'], res['rho'], res['nit'], res['S']
    dif = np.empty([nit])
    i = 0  # only the first entropy index is analyzed here
    for j in range(nit):
        # beta = 0: compare against the maximally mixed reference state.
        grho = Gibbs.beta2rho(H[i, j], 0)
        dif[j] = np.mean(np.diag(Rho.compare_all(R[i, j], grho)))
    return dif
# Example 5
def test_local_H2(n, choice, k=0.5, d=2, tol=1e-4):
    '''Test local Hamiltonian H^2'''

    # Pick the squared Hamiltonian: a Kronecker product of single-site
    # diagonals, or the nearest-neighbour construction.
    H2 = (reduce(np.kron, [np.diag([2, 1])] * n)
          if choice == "prod" else nearest(n, np.diag([-1, 1])))
    rho = Rho.rho_prod_even(n, n * k, rs=np.random)
    Y = LayersDense(rho, H2=H2, D=d)
    target = opt.min_expect(H2, rho)
    prev = np.inf
    for _ in range(50):
        history = minimizeVarE_cycle(Y)
        current = history[-1]
        # Stop once the per-cycle relative improvement is below tol/100.
        if prev - current < tol * current / 100:
            break
        prev = current
    assert abs(prev - target) < 3 * tol * abs(max(
        target, 1)), "Global minimum for local Hamiltonian not found"
# Example 6
def test_small_chain_varE(n, k, nit=50):
    '''Test function are really optimized'''

    # Per-size tolerances: larger chains converge less tightly.
    tolerances = {2: 1e-6, 3: 1e-2, 4: 0.1}
    tol = tolerances[n]
    H = Hamilton_XZ(n)['H']
    rho = Rho.rho_prod_even(n, n * k, rs=np.random)
    exact = opt.exact_min_varE(H, rho)
    # Circuit depth chosen so the layers can express the exact minimizer.
    depth = 4**(n - 2) // (n - 1) * 2 + 1
    Y = LayersDense(rho, H, D=depth)
    prev = np.inf
    for _ in range(nit):
        history = minimizeVarE_cycle(Y)
        # Each cycle must be monotonically non-increasing...
        assert all(history[:-1] + 1e-6 >= history[1:])
        # ...and must not regress past the previous cycle's end value.
        assert history[0] <= prev + 1e-6
        if prev - history[-1] < tol * max(history[-1], 1) / 100:
            break
        prev = history[-1]
    assert abs(prev - exact) < 3 * tol * max(
        exact, 1), "Global minimum for varE not found"
# Example 7
def Collect(Hf,
            arg_tpl,
            _optimize,
            arg_opt,
            rs=np.random,
            rs_rot=np.random,
            ns=11,
            nit=10,
            pre=''):
    """Run `_optimize` for `nit` random Hamiltonians at each entropy in S,
    save the Hamiltonians and optimized states to disk, and return them.

    Hf        -- Hamiltonian factory, called as Hf(**generate_args(arg_tpl, rs))
    arg_tpl   -- Hamiltonian argument template; must contain 'n'
    _optimize -- optimizer, called as _optimize(H, rho, **arg_opt)
    rs        -- random source for Hamiltonian disorder
    rs_rot    -- random source for the initial density matrices
    ns        -- number of entropy points, or an explicit sequence of them
    nit       -- number of repetitions (fresh Hamiltonian each time)
    pre       -- filename prefix forwarded to fname()
    """
    n = arg_tpl['n']
    if isinstance(ns, int):
        s = mlinspace(ns)
    else:
        ns, s = len(ns), np.array(ns)
    S = n * s  # total entropies: entropy density times chain length
    R = np.empty([ns, nit, 2**n, 2**n], dtype='complex128')
    H = np.empty_like(R)
    for j in range(nit):
        print("itering", j)
        # One disorder realization per repetition, shared across entropies.
        H4 = Hf(**generate_args(arg_tpl, rs))['H']
        for i, s in enumerate(S):  # NOTE: rebinds `s`, shadowing the array above
            print("Entropy S", s)
            print(arg_tpl, s)
            #H4 = Hf(**generate_args(arg_tpl, rs))['H']
            rho = Rho.rho_prod_even(n, s, rs=rs_rot)
            #print(rho, H4)
            print(i, j, H.shape)
            H[i, j] = H4
            R[i, j] = _optimize(H4, rho, **arg_opt)
    result = {
        'Hamilton': Hf.__name__,
        'S': S,
        'nit': nit,
        'rho': R,
        'H': H,
        **arg_tpl
    }
    np.save(fname(Hf, arg_tpl, pre=pre), result)
    print(fname(Hf, arg_tpl, pre=pre), 'Data saved!')
    return result
# Example 8
def draw_diff_rho(Hf, arg_tpl, _optimize, arg_opt, arg_clt):
    """Plot, for every entropy S, the mean site-wise difference between each
    optimized state and the identity reference, with standard-error bars,
    and save the figure to disk.

    Returns the difference averaged over the two central trace counts.

    Fix: removed the dead `v = la.eigvalsh(H[i, j])` — an eigendecomposition
    computed on every inner iteration whose result was only referenced by a
    commented-out print.
    """
    n = arg_tpl['n']
    res = loadData(Hf, arg_tpl, _optimize, arg_opt, arg_clt)
    print(res)
    H, R, nit, S = res['H'], res['rho'], res['nit'], res['S']
    dif = np.empty([len(S), nit, 2 * n - 1])
    for i, s in enumerate(S):
        for j in range(nit):
            #b = Gibbs.rho2beta(H[i, j], R[i, j])
            #grho = Gibbs.beta2rho(H[i, j], b)
            # Reference state: identity (Gibbs comparison kept above, disabled).
            grho = np.eye(*R[i, j].shape)
            #print(b, grho)
            dif[i, j] = Rho.compare(R[i, j], grho)
    # Mean and standard error of the mean over the nit repetitions.
    mdif = mean(dif, axis=1)
    sdif = std(dif, axis=1, ddof=1) / np.sqrt(nit)
    cla()
    for i, s in enumerate(S):
        errorbar(arange(-n + 1, n),
                 mdif[i],
                 sdif[i],
                 label="S={:.1f}".format(s),
                 capsize=1.5)
    grid()
    title('Diff between rho for {}'.format(info(Hf, arg_tpl)))
    xlabel('Number of traced sites, +/- mean left/right')
    ylabel(r'$|\mathrm{tr}[\rho-\rho_G]|$')
    legend()
    savefig(fname(Hf, arg_tpl, "figures", "rho-diff.pdf"))
    print('-' * 30)
    print(mdif)
    # Average of the two central columns (half the chain traced from each side).
    l = (mdif[:, n // 2 - 1] + mdif[:, -n // 2]) / 2

    return l
# Example 9
def draw_diff_matrix(Hf, arg_tpl, _optimize, arg_opt, arg_clt):
    """For every entropy S and repetition: compare each optimized state with
    the beta=0 (infinite-temperature) Gibbs state, print energy, energy
    variance and polarizations, and save a difference plot to disk.
    """
    n = arg_tpl['n']
    D = arg_opt['D']
    res = loadData(Hf,
                   arg_tpl,
                   _optimize,
                   arg_opt,
                   arg_clt,
                   pre="_D={}".format(D))
    H, R, nit, S = res['H'], res['rho'], res['nit'], res['S']
    dif = np.empty([len(S), nit, n, n])
    px = np.empty([len(S), nit, n])  # squared polarization, component 1 (presumably sigma_x)
    pz = np.empty([len(S), nit, n])  # squared polarization, component 3 (presumably sigma_z)
    E = np.empty([len(S), nit])      # energy tr(H rho)
    varE = np.empty([len(S), nit])   # energy variance tr(H^2 rho) - E^2
    for i, s in enumerate(S):
        print('=' * 50)
        print(">>> Entropy S={}".format(s))
        for j in range(nit):
            E[i, j] = trace2(H[i, j], R[i, j]).real
            # NOTE(review): b is computed but unused — grho below uses
            # beta = 0 (infinite temperature); confirm which is intended.
            b = Gibbs.rho2beta(H[i, j], R[i, j])
            grho = Gibbs.beta2rho(H[i, j], 0)
            varE[i, j] = trace2(H[i, j] @ H[i, j], R[i, j]).real - E[i, j]**2
            #print(b, grho)
            dif[i, j] = Rho.compare_all(R[i, j], grho)
            #dif[i, j] = diff_gibbs(R[i, j], H[i, j])
            px[i, j] = polarization(R[i, j], n, 1)**2
            pz[i, j] = polarization(R[i, j], n, 3)**2
            print('-' * 50)
            print("Energy", E[i, j])
            print("Variance", varE[i, j])
            #px = polarization(R[i, j], n, 1)
            #py = polarization(R[i, j], n, 2)
            #pz = polarization(R[i, j], n, 3)
            #print((px**2+py**2+pz**2)/2)
            print(np.diag(dif[i, j]))
            #print(b, px[i, j])
            #v = la.eigvalsh(H[i, j])
            #print("bE", b, b*(v[0]-v[-1]).real)
    # Averages over repetitions. NOTE(review): mpx/mpz/mE/mvarE are computed
    # but never used below — kept for interactive inspection?
    mdif = mean(dif, axis=1)
    #mvar = var(dif, axis=1, ddof=1)
    mpx = mean(px, axis=1)
    mpz = mean(pz, axis=1)
    mE = mean(E, axis=1)
    mvarE = mean(varE, axis=1)
    #sdif = std(dif, axis=1, ddof=1) / np.sqrt(nit)
    #print(dif[0, 0])
    #plt.close("all")
    #clf()
    #plot_diff_diag(mdif, mvar, arg_tpl, S/n, arg_opt['D'])
    #savefig(fname(Hf, arg_tpl, "figures", "rho-diff-diag.pdf", pre="_D={:02d}".format(D), align=True))
    ##plt.close("all")
    clf()
    # Only the first entropy's mean difference is plotted.
    plot_diff(diff(mdif[0]), arg_tpl, S / n, arg_opt['D'])
    savefig(
        fname(Hf,
              arg_tpl,
              "figures",
              "rho-diff.pdf",
              pre="_D={:02d}".format(D),
              align=True))
    plt.close("all")
    clf()
# Example 10
import ETH.optimization as opt
from ETH.basic import *
import ETH.Rho as Rho
from DMRG.Ising import Hamilton_XZ
import scipy.linalg as la
import numpy as np

# Shared fixtures for the tests below: a 2-site XZ Hamiltonian, its square,
# and a randomly rotated even-entropy product state.
n = 2
H = Hamilton_XZ(n)['H']
H2 = H @ H
rho = Rho.rho_prod_even(n, n * 0.5)
rho = rand_rotate(rho)
# 4-index tensors built from the outer product of rho and H (resp. H2),
# with axes reordered per 'jk, li->ijkl'; consumed by the local optimizer.
V = np.einsum('jk, li->ijkl', rho, H)
V2 = np.einsum('jk, li->ijkl', rho, H2)


def test_local_optimization():
    """The local quadratic optimizer must reach the exact minimum of <H2>."""
    U, got = opt.minimize_quadratic_local(V2, nit=200)
    # Consistency fix: every other call site in this project passes
    # min_expect(H2, rho) — the arguments here were swapped (rho, H2).
    expected = opt.min_expect(H2, rho)
    assert abs(expected - got) < 1e-6


def meta_test_df(df, f, eps):
    """Numerically validate an analytic first derivative by finite differences.

    df  -- tuple (M, f1, f2): direction matrix plus analytic 1st and 2nd
           derivatives of f along expm(1j * eps * M)
    f   -- objective; f() is the base value, f(U) the value displaced by U
    eps -- finite-difference step, also used as the acceptance tolerance
    """
    direction, d1, d2 = df
    U = la.expm(1j * eps * direction)
    forward = (f(U) - f()) / eps
    backward = (f() - f(U.T.conj())) / eps
    central = (forward + backward) / 2
    d2_estimate = (forward - backward) / eps
    print(backward, forward, d1, d2, d2_estimate)
    # The symmetric difference cancels the O(eps) error term.
    assert abs(central - d1) < eps
# Example 11
import numpy as np
from .layers.layers_dense import LayersDense
from .layers.layers_mpo import LayersMPO, MPO_TL, ud2rl
from .basic import trace2, rand_unitary
from numpy.random import RandomState
from functools import reduce
if __name__ == "__main__":
    # Smoke test: build the same random circuit in both the dense-layer and
    # the MPO-layer representations and print <H^2> from each for comparison.
    from ETH import Rho
    from DMRG.Ising import Hamilton_XZ, Hamilton_TL
    n = 9  # NOTE(review): old comment said "dof = 2**(2n) = 64" — stale for n=9?
    d = 6  # At least 2**(2(n-2))
    rs = np.random  # RandomState(123581321)
    rho = Rho.rho_even(n, n / 2, amp=0.1, rs=rs)
    H = Hamilton_TL(n, 1, 1, 1)['H']
    mpos = MPO_TL(1, 1, 1)
    L = LayersMPO(rho, mpos[0], d, n - 1, mpos[1], offset=0)
    Y = LayersDense(Rho.product_rho(rho), H, D=d)
    # Fill both layer objects with the same random unitaries; the MPO layer
    # receives the conjugate-transposed, axis-reordered (ud2rl) form.
    for i in Y.indices:
        print(i)
        Y[i] = rand_unitary([4, 4], rs=rs)
        L[i[::-1]] = ud2rl(Y[i].T.conj())
    R = Y.contract_rho()
    print(trace2(R, H @ H).real)
    # Sweep the MPO contraction; each printed value should presumably match
    # the dense value printed above — verify by eye.
    for i, l, r in L.sweep(L.H2, 1):
        l = L.apply_pair(i, l, L.H2)
        print(np.dot(l.flatten(), r.flatten()).real)
    #h = [np.einsum('ijkl, lk->ij', L.H2, r) for r in rho]
    #print(reduce(np.matmul, h)[0,-1].real)
    #h = [np.einsum('ijkl, lk->ij', L.H, r.conj()) for r in rho]
    #print(reduce(np.matmul, h)[0,-1].real)
# Example 12
        for i in range(N):
            # Forward
            for j in self.indices[0:nblocks - 1]:
                yield j, L[-1], R[-1]
                L.append(self.apply_pair(j, L[-1], mpo))
                R.pop()
            # Backward
            for j in self.indices[nblocks - 1:0:-1]:
                yield j, L[-1], R[-1]
                L.pop()
                R.append(self.apply_pair(j, R[-1], mpo, True))


if __name__ == "__main__":
    # Smoke test for the MPO layer machinery: contract <H> through the layer
    # object and compare with a direct chain contraction of the MPO.
    from ETH import Rho
    from numpy.random import RandomState
    l = 4
    # [np.array([[0.7,0],[0,0.3]]) for i in range(l)]
    rho = Rho.rho_even(l, l / 2, amp=0.4, rs=RandomState(123581321))
    H, H2 = MPO_TL(J=1, g=1, h=1)
    L = LayersMPO(rho, H, 2, l - 1, H2, offset=0)
    Left = L.init_operator(H, row=0)
    #print(L.apply_pair((0,0), Left, H).flatten())
    print("Final", L.contract_all(L.H).real)
    # Direct contraction: close each MPO tensor with the local rho, then
    # chain-multiply and take the corner element.
    h = [np.einsum('ijkl, lk->ij', H, r) for r in rho]
    print(reduce(np.matmul, h)[0, -1].real)
    print("MPO 1", np.einsum("i, jk->kij", h[0][0], rho[1]).flatten())
    #print(np.einsum("ijk, kj, lm->mil", H[0], rho[0], rho[1]).flatten())
    #print("MPO 2", np.einsum("i, iklm->klm", (h[0]@h[1])[0], H).transpose([1,0,2]).flatten())
    #print("MPO 3", np.einsum("i, ikll, mn->nkm", (h[0]@h[1])[0], H, rho[0]).flatten())