Example #1
def swap_local_pair_direct(state, x_pos, y_pos, svd_option):
    if svd_option is None:
        svd_option = ReducedSVD()

    if x_pos[0] < y_pos[0]:  # [x y]^T
        prod_subscripts = 'abcdxp,cfghyq->absdyq,sfghxp'
        scale_u_subscripts = 'absdyq,s->absdyq'
        scale_v_subscripts = 'sfghxp,s->sfghxp'
    elif x_pos[0] > y_pos[0]:  # [y x]^T
        prod_subscripts = 'abcdxp,efahyq->sbcdyq,efshxp'
        scale_u_subscripts = 'sbcdyq,s->sbcdyq'
        scale_v_subscripts = 'efshxp,s->efshxp'
    elif x_pos[1] < y_pos[1]:  # [x y]
        prod_subscripts = 'abcdxp,efgbyq->ascdyq,efgsxp'
        scale_u_subscripts = 'ascdyq,s->ascdyq'
        scale_v_subscripts = 'efgsxp,s->efgsxp'
    elif x_pos[1] > y_pos[1]:  # [y x]
        prod_subscripts = 'abcdxp,edghyq->abcsyq,esghxp'
        scale_u_subscripts = 'abcsyq,s->abcsyq'
        scale_v_subscripts = 'esghxp,s->esghxp'
    else:
        assert False

    x, y = state.grid[x_pos], state.grid[y_pos]
    u, s, v = state.backend.einsumsvd(prod_subscripts, x, y, option=svd_option)
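    # absorb the singular values evenly: scale both factors by sqrt(s)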
    s = s**0.5
    u = state.backend.einsum(scale_u_subscripts, u, s)
    v = state.backend.einsum(scale_v_subscripts, v, s)
    state.grid[x_pos] = u
    state.grid[y_pos] = v
Example #2
 def test_contract_vector(self, backend):
     qstate = peps.random(3, 3, 2, backend=backend)
     statevector = qstate.statevector(contract_option=Snake())
     for contract_option in [BMPS(None), BMPS(ReducedSVD(16)), BMPS(RandomizedSVD(16)), BMPS(ImplicitRandomizedSVD(16))]:
         with self.subTest(contract_option=contract_option):
             contract_result = qstate.statevector(contract_option=contract_option)
             self.assertTrue(backend.allclose(statevector.tensor, contract_result.tensor))
Example #3
 def test_einsumsvd_options(self, tb):
     from tensorbackends.interface import ReducedSVD, RandomizedSVD, ImplicitRandomizedSVD
     a = tb.astensor(
         [[0, 2e-3j, 0, 0], [1e-3, 0, 0, 0], [0, 0, 3, 0], [0, 0, 0, 4j]],
         dtype=complex)
     p = tb.astensor(
         [[0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]],
         dtype=complex)
     s_true = tb.astensor([4, 3])
     low_rank = tb.astensor(
         [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 3, 0], [0, 0, 0, 4j]],
         dtype=complex)
     options = [
         ReducedSVD(rank=2),
         RandomizedSVD(rank=2, niter=2, oversamp=1),
         ImplicitRandomizedSVD(rank=2, niter=2)
     ]
     for option in options:
         with self.subTest(option=option):
             u, s, v = tb.einsumsvd('ij,jk->is,sk', p, a, option=option)
             usv = tb.einsum('is,s,sk->ik', u, s, v)
             self.assertEqual(u.shape, (4, 2))
             self.assertEqual(s.shape, (2, ))
             self.assertEqual(v.shape, (2, 4))
             self.assertTrue(tb.allclose(s, s_true))
             self.assertTrue(tb.allclose(usv, low_rank, atol=1e-9))
Example #4
 def test_contract_scalar(self, backend):
     qstate = peps.random(3, 4, 2, backend=backend)
     norm = qstate.norm(contract_option=Snake())
     for contract_option in contract_options:
         if contract_option is not Snake:
             for svd_option in (None, ReducedSVD(16), RandomizedSVD(16), ImplicitRandomizedSVD(16), ImplicitRandomizedSVD(16, orth_method='local_gram')):
                 with self.subTest(contract_option=contract_option.__name__, svd_option=svd_option):
                     self.assertTrue(backend.isclose(norm, qstate.norm(contract_option=contract_option(svd_option))))
Example #5
 def test_inner_approx(self, backend):
     psi = peps.computational_zeros(2, 3, backend=backend)
     psi.apply_circuit([
         Gate('H', [], [0]),
         Gate('CX', [], [0,3]),
         Gate('H', [], [3]),
     ], update_option=peps.DirectUpdate(ImplicitRandomizedSVD(rank=2)))
     phi = peps.computational_zeros(2, 3, backend=backend)
     contract_option = peps.BMPS(ReducedSVD(rank=2))
     self.assertTrue(backend.isclose(psi.inner(phi, contract_option), 0.5))
Example #6
 def test_amplitude_approx(self, backend):
     qstate = peps.computational_zeros(2, 3, backend=backend)
     qstate.apply_circuit([
         Gate('X', [], [0]),
         Gate('H', [], [1]),
         Gate('CX', [], [0,3]),
         Gate('CX', [], [1,4]),
         Gate('S', [], [1]),
     ], update_option=peps.DirectUpdate(ImplicitRandomizedSVD(rank=2)))
     contract_option = peps.BMPS(ReducedSVD(rank=2))
     self.assertTrue(backend.isclose(qstate.amplitude([1,0,0,1,0,0], contract_option), 1/np.sqrt(2)))
     self.assertTrue(backend.isclose(qstate.amplitude([1,1,0,1,1,0], contract_option), 1j/np.sqrt(2)))
Example #7
 def test_amplitude_local_gram_qr_svd_update(self, backend):
     qstate = peps.computational_zeros(2, 3, backend=backend)
     qstate.apply_circuit([
         Gate('X', [], [0]),
         Gate('H', [], [1]),
         Gate('CX', [], [0,3]),
         Gate('CX', [], [1,4]),
         Gate('S', [], [1]),
     ], update_option=peps.LocalGramQRSVDUpdate(rank=2))
     contract_option = peps.BMPS(ReducedSVD(rank=2))
     self.assertTrue(backend.isclose(qstate.amplitude([1,0,0,1,0,0], contract_option), 1/np.sqrt(2)))
     self.assertTrue(backend.isclose(qstate.amplitude([1,1,0,1,1,0], contract_option), 1j/np.sqrt(2)))
Example #8
 def test_trace_with_cache(self, backend):
     observable = Observable.ZZ(0,1) + Observable.ZZ(0,3)
     contract_option = BMPS(ReducedSVD(1))
     qstate = peps.identity(3, 3, backend=backend)
     cache = qstate.make_trace_cache(contract_option)
     self.assertTrue(backend.isclose(
         qstate.trace(contract_option=contract_option, cache=cache),
         2**qstate.nsite
     ))
     self.assertTrue(backend.isclose(
         qstate.trace(observable, contract_option=contract_option, cache=cache), 
         0, atol=1e-8
     ))
Example #9
 def test_einsvd_options(self, tb):
     from tensorbackends.interface import ReducedSVD, RandomizedSVD
     a = tb.astensor([[1e-3,0,0,0],[0,2e-3j,0,0],[0,0,3,0],[0,0,0,4j]], dtype=complex).reshape(2,2,2,2)
     s_true = tb.astensor([4,3])
     low_rank = tb.astensor([[0,0,0,0],[0,0,0,0],[0,0,3,0],[0,0,0,4j]], dtype=complex)
     for option in [ReducedSVD(rank=2), RandomizedSVD(rank=2, niter=2, oversamp=1)]:
         with self.subTest(option=option):
             u, s, v = tb.einsvd('ijkl->(ij)s,s(kl)', a, option=option)
             usv = tb.einsum('is,s,sk->ik', u, s, v)
             self.assertEqual(u.shape, (4,2))
             self.assertEqual(s.shape, (2,))
             self.assertEqual(v.shape, (2,4))
             self.assertTrue(tb.allclose(s, s_true))
             self.assertTrue(tb.allclose(usv, low_rank, atol=1e-9))
Example #10
 def test_expectation_use_cache_approx(self, backend):
     qstate = peps.computational_zeros(2, 3, backend=backend)
     qstate.apply_circuit([
         Gate('X', [], [0]),
         Gate('CX', [], [0,3]),
         Gate('H', [], [2]),
     ], update_option=peps.DirectUpdate(ImplicitRandomizedSVD(rank=2)))
     observable = 1.5 * Observable.sum([
         Observable.Z(0) * 2,
         Observable.Z(1),
         Observable.Z(2) * 2,
         Observable.Z(3),
     ])
     contract_option = peps.BMPS(ReducedSVD(rank=2))
     self.assertTrue(backend.isclose(qstate.expectation(observable, use_cache=True, contract_option=contract_option), -3))
Example #11
 def test_einsumsvd_absorb_s(self, tb):
     from tensorbackends.interface import ReducedSVD, RandomizedSVD, ImplicitRandomizedSVD
     a = tb.astensor([[0,2e-3j,0,0],[1e-3,0,0,0],[0,0,3,0],[0,0,0,4j]], dtype=complex)
     p = tb.astensor([[0,1,0,0],[1,0,0,0],[0,0,1,0],[0,0,0,1]], dtype=complex)
     s_true = tb.astensor([4,3])
     low_rank = tb.astensor([[0,0,0,0],[0,0,0,0],[0,0,3,0],[0,0,0,4j]], dtype=complex)
     options = [
         ReducedSVD(rank=2),
         RandomizedSVD(rank=2, niter=2, oversamp=1),
         ImplicitRandomizedSVD(rank=2, niter=2, orth_method='qr'),
         ImplicitRandomizedSVD(rank=2, niter=2, orth_method='local_gram'),
     ]
     for option in options:
         for absorb_s in ['even', 'u', 'v']:
             with self.subTest(option=option):
                 u, _, v = tb.einsumsvd('ij,jk->is,sk', p, a, option=option, absorb_s=absorb_s)
                 usv = tb.einsum('is,sk->ik', u, v)
                 self.assertEqual(u.shape, (4,2))
                 self.assertEqual(v.shape, (2,4))
                 self.assertTrue(tb.allclose(usv, low_rank, atol=1e-9))
Example #12
def contract_TRG(state, svd_option_1st=None, svd_option_rem=None):
    """
    Contract the PEPS using Tensor Renormalization Group.

    Parameters
    ----------
    svd_option_1st: tensorbackends.interface.Option, optional
        Parameters for the first SVD in TRG. Defaults to tensorbackends.interface.ReducedSVD() if not given.

    svd_option_rem: tensorbackends.interface.Option, optional
        Parameters for the remaining SVD truncations. Truncation is performed only if this is given.

    Returns
    -------
    output: state.backend.tensor or scalar
        The contraction result.

    References
    ----------
    https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.99.120601
    https://journals.aps.org/prb/abstract/10.1103/PhysRevB.78.205116
    """
    # base case
    if state.shape <= (2, 2):
        return contract_BMPS(state, svd_option_rem)
    # SVD each tensor into two
    tn = np.empty(state.shape + (2, ), dtype=object)
    for (i, j), tsr in np.ndenumerate(state.grid):
        str_uv = 'abi,icdpq' if (i + j) % 2 == 0 else 'aidpq,bci'
        tn[i, j, 0], _, tn[i, j, 1] = state.backend.einsumsvd(
            'abcdpq->' + str_uv,
            tsr,
            option=svd_option_1st or ReducedSVD(),
            absorb_s='even')
        # append singleton physical dimensions to the factor that has no p, q legs
        k = (i + j) % 2
        tn[i, j, k] = tn[i, j, k].reshape(*(tn[i, j, k].shape + (1, 1)))
    return _contract_TRG(state, tn, svd_option_rem)
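
A minimal usage sketch for contract_TRG (hedged: `peps.random` and the backend name follow the test examples above; the grid size and ranks here are illustrative assumptions, not documented defaults):

# Hypothetical usage of contract_TRG; peps.random(nrow, ncol, rank, backend=...) is
# assumed to behave as in the tests above, and 'numpy' is an assumed backend name.
from tensorbackends.interface import ReducedSVD

qstate = peps.random(4, 4, 2, backend='numpy')
# exact first SVD splits (default ReducedSVD()), rank-8 truncation for the remaining SVDs
result = contract_TRG(qstate, svd_option_1st=ReducedSVD(), svd_option_rem=ReducedSVD(8))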
Example #13
File: update.py  Project: LinjianMa/koala
def apply_local_pair_operator_qr(state, operator, positions, rank):
    assert len(positions) == 2
    svd_option = ReducedSVD(rank)
    x_pos, y_pos = positions
    x, y = state.grid[x_pos], state.grid[y_pos]
    operator = state.backend.astensor(operator)

    if x_pos[0] < y_pos[0]:  # [x y]^T
        split_x_subscripts = 'abcdxp->abdi,icxp'
        split_y_subscripts = 'cfghyq->fghj,jcyq'
        recover_x_subscripts = 'abdi,isup,s->absdup'
        recover_y_subscripts = 'fghj,jsvq,s->sfghvq'
    elif x_pos[0] > y_pos[0]:  # [y x]^T
        split_x_subscripts = 'abcdxp->bcdi,iaxp'
        split_y_subscripts = 'efahyq->efhj,jayq'
        recover_x_subscripts = 'bcdi,isup,s->sbcdup'
        recover_y_subscripts = 'efhj,jsvq,s->efshvq'
    elif x_pos[1] < y_pos[1]:  # [x y]
        split_x_subscripts = 'abcdxp->acdi,ibxp'
        split_y_subscripts = 'efgbyq->efgj,jbyq'
        recover_x_subscripts = 'acdi,isup,s->ascdup'
        recover_y_subscripts = 'efgj,jsvq,s->efgsvq'
    elif x_pos[1] > y_pos[1]:  # [y x]
        split_x_subscripts = 'abcdxp->abci,idxp'
        split_y_subscripts = 'edghyq->eghj,jdyq'
        recover_x_subscripts = 'abci,isup,s->abcsup'
        recover_y_subscripts = 'eghj,jsvq,s->esghvq'
    else:
        assert False

    xq, xr = state.backend.einqr(split_x_subscripts, x)
    yq, yr = state.backend.einqr(split_y_subscripts, y)

    u, s, v = state.backend.einsumsvd('ikxp,jkyq,xyuv->isup,jsvq',
                                      xr,
                                      yr,
                                      operator,
                                      option=svd_option)
    s = s**0.5
    state.grid[x_pos] = state.backend.einsum(recover_x_subscripts, xq, u, s)
    state.grid[y_pos] = state.backend.einsum(recover_y_subscripts, yq, v, s)
Example #14
def swap_local_pair_qr(state, x_pos, y_pos, rank):
    svd_option = ReducedSVD(rank)

    if x_pos[0] < y_pos[0]:  # [x y]^T
        split_x_subscripts = 'abcdxp->abdi,icxp'
        split_y_subscripts = 'cfghyq->fghj,jcyq'
        recover_x_subscripts = 'abdi,isyq,s->absdyq'
        recover_y_subscripts = 'fghj,jsxp,s->sfghxp'
    elif x_pos[0] > y_pos[0]:  # [y x]^T
        split_x_subscripts = 'abcdxp->bcdi,iaxp'
        split_y_subscripts = 'efahyq->efhj,jayq'
        recover_x_subscripts = 'bcdi,isyq,s->sbcdyq'
        recover_y_subscripts = 'efhj,jsxp,s->efshxp'
    elif x_pos[1] < y_pos[1]:  # [x y]
        split_x_subscripts = 'abcdxp->acdi,ibxp'
        split_y_subscripts = 'efgbyq->efgj,jbyq'
        recover_x_subscripts = 'acdi,isyq,s->ascdyq'
        recover_y_subscripts = 'efgj,jsxp,s->efgsxp'
    elif x_pos[1] > y_pos[1]:  # [y x]
        split_x_subscripts = 'abcdxp->abci,idxp'
        split_y_subscripts = 'edghyq->eghj,jdyq'
        recover_x_subscripts = 'abci,isyq,s->abcsyq'
        recover_y_subscripts = 'eghj,jsxp,s->esghxp'
    else:
        assert False

    x, y = state.grid[x_pos], state.grid[y_pos]

    xq, xr = state.backend.einqr(split_x_subscripts, x)
    yq, yr = state.backend.einqr(split_y_subscripts, y)

    # SVD only the small R factors; the output subscripts swap the physical
    # indices (x, p) and (y, q) between the two sites
    u, s, v = state.backend.einsumsvd('ikxp,jkyq->isyq,jsxp',
                                      xr,
                                      yr,
                                      option=svd_option)
    s = s**0.5
    state.grid[x_pos] = state.backend.einsum(recover_x_subscripts, xq, u, s)
    state.grid[y_pos] = state.backend.einsum(recover_y_subscripts, yq, v, s)
Example #15
File: update.py  Project: LinjianMa/koala
def apply_local_pair_operator_direct(state, operator, positions, svd_option):
    assert len(positions) == 2
    if svd_option is None:
        svd_option = ReducedSVD()
    x_pos, y_pos = positions
    x, y = state.grid[x_pos], state.grid[y_pos]
    operator = state.backend.astensor(operator)

    if x_pos[0] < y_pos[0]:  # [x y]^T
        prod_subscripts = 'abcdxp,cfghyq,xyuv->abndup,nfghvq'
        scale_u_subscripts = 'absdup,s->absdup'
        scale_v_subscripts = 'sbcdvp,s->sbcdvp'
    elif x_pos[0] > y_pos[0]:  # [y x]^T
        prod_subscripts = 'abcdxp,efahyq,xyuv->nbcdup,efnhvq'
        scale_u_subscripts = 'sbcdup,s->sbcdup'
        scale_v_subscripts = 'absdvp,s->absdvp'
    elif x_pos[1] < y_pos[1]:  # [x y]
        prod_subscripts = 'abcdxp,efgbyq,xyuv->ancdup,efgnvq'
        scale_u_subscripts = 'ascdup,s->ascdup'
        scale_v_subscripts = 'abcsvp,s->abcsvp'
    elif x_pos[1] > y_pos[1]:  # [y x]
        prod_subscripts = 'abcdxp,edghyq,xyuv->abcnup,enghvq'
        scale_u_subscripts = 'abcsup,s->abcsup'
        scale_v_subscripts = 'ascdvp,s->ascdvp'
    else:
        assert False

    u, s, v = state.backend.einsumsvd(prod_subscripts,
                                      x,
                                      y,
                                      operator,
                                      option=svd_option)
    s = s**0.5
    u = state.backend.einsum(scale_u_subscripts, u, s)
    v = state.backend.einsum(scale_v_subscripts, v, s)
    state.grid[x_pos] = u
    state.grid[y_pos] = v
Example #16
def swap_local_pair_local_gram_qr_svd(state, x_pos, y_pos, rank):
    if x_pos[0] < y_pos[0]:  # [x y]^T
        gram_x_subscripts = 'abcdxp,abCdXP->xpcXPC'
        gram_y_subscripts = 'cfghyq,CfghYQ->yqcYQC'
        xq_subscripts = 'abcdxp,xpci->abdi'
        yq_subscripts = 'cfghyq,yqcj->fghj'
        recover_x_subscripts = 'abcdxp,cxpsyq->absdyq'
        recover_y_subscripts = 'cfghyq,cyqsxp->sfghxp'
    elif x_pos[0] > y_pos[0]:  # [y x]^T
        gram_x_subscripts = 'abcdxp,AbcdXP->xpaXPA'
        gram_y_subscripts = 'efahyq,efAhYQ->yqaYQA'
        xq_subscripts = 'abcdxp,xpai->bcdi'
        yq_subscripts = 'efahyq,yqaj->efhj'
        recover_x_subscripts = 'abcdxp,axpsyq->sbcdyq'
        recover_y_subscripts = 'efahyq,ayqsxp->efshxp'
    elif x_pos[1] < y_pos[1]:  # [x y]
        gram_x_subscripts = 'abcdxp,aBcdXP->xpbXPB'
        gram_y_subscripts = 'efgbyq,efgBYQ->yqbYQB'
        xq_subscripts = 'abcdxp,xpbi->acdi'
        yq_subscripts = 'efgbyq,yqbj->efgj'
        recover_x_subscripts = 'abcdxp,bxpsyq->ascdyq'
        recover_y_subscripts = 'efgbyq,byqsxp->efgsxp'
    elif x_pos[1] > y_pos[1]:  # [y x]
        gram_x_subscripts = 'abcdxp,abcDXP->xpdXPD'
        gram_y_subscripts = 'edghyq,eDghYQ->yqdYQD'
        xq_subscripts = 'abcdxp,xpdi->abci'
        yq_subscripts = 'edghyq,yqdj->eghj'
        recover_x_subscripts = 'abcdxp,dxpsyq->abcsyq'
        recover_y_subscripts = 'edghyq,dyqsxp->esghxp'
    else:
        assert False

    numpy_backend = tensorbackends.get('numpy')

    def gram_qr_local(backend, a, gram_a_subscripts, q_subscripts):
        # Gram-based local QR: eigendecompose the Gram matrix of `a` over the legs
        # that feed the SVD to obtain an R factor and its pseudo-inverse without
        # forming Q explicitly (q_subscripts is accepted but unused here; Q is
        # applied implicitly by the recover einsums at the end of the function).
        gram_a = backend.einsum(gram_a_subscripts, a.conj(), a)
        d1, d2, xi = gram_a.shape[:3]

        # local eigendecomposition of the (d1*d2*xi) x (d1*d2*xi) Gram matrix
        gram_a = gram_a.numpy().reshape(d1 * d2 * xi, d1 * d2 * xi)
        w, v = la.eigh(gram_a, overwrite_a=True)
        s = np.clip(w, 0, None)**0.5
        s_pinv = np.divide(1, s, out=np.zeros_like(s), where=s != 0)
        r = np.einsum('j,ij->ji', s, v.conj()).reshape(d1 * d2 * xi, d1, d2, xi)
        r_inv = np.einsum('j,ij->ij', s_pinv, v).reshape(d1, d2, xi, d1 * d2 * xi)
        return numpy_backend.tensor(r), numpy_backend.tensor(r_inv)

    x, y = state.grid[x_pos], state.grid[y_pos]

    xr, xr_inv = gram_qr_local(state.backend, x, gram_x_subscripts,
                               xq_subscripts)
    yr, yr_inv = gram_qr_local(state.backend, y, gram_y_subscripts,
                               yq_subscripts)

    u, s, v = numpy_backend.einsumsvd('ixpk,jyqk->isyq,jsxp',
                                      xr,
                                      yr,
                                      option=ReducedSVD(rank))
    s **= 0.5
    u = numpy_backend.einsum('xpki,isyq,s->kxpsyq', xr_inv, u, s)
    v = numpy_backend.einsum('yqkj,jsxp,s->kyqsxp', yr_inv, v, s)

    u = state.backend.astensor(u)
    v = state.backend.astensor(v)
    state.grid[x_pos] = state.backend.einsum(recover_x_subscripts, x, u)
    state.grid[y_pos] = state.backend.einsum(recover_y_subscripts, y, v)
Example #17
def apply_local_pair_operator_local_gram_qr_svd(state,
                                                operator,
                                                positions,
                                                rank,
                                                flip=False):
    assert len(positions) == 2
    x_pos, y_pos = positions
    x, y = state.grid[x_pos], state.grid[y_pos]

    if flip:
        if x_pos[0] < y_pos[0]:  # [x y]^T
            gram_x_subscripts = 'abcdpx,abCdpX->xcXC'
            gram_y_subscripts = 'cfghqy,CfghqY->ycYC'
            recover_x_subscripts = 'abcdpx,cxsu->absdpu'
            recover_y_subscripts = 'cfghqy,cysv->sfghqv'
        elif x_pos[0] > y_pos[0]:  # [y x]^T
            gram_x_subscripts = 'abcdpx,AbcdpX->xaXA'
            gram_y_subscripts = 'efahpy,efAhpY->yaYA'
            recover_x_subscripts = 'abcdpx,axsu->sbcdpu'
            recover_y_subscripts = 'efahqy,aysv->efshqv'
        elif x_pos[1] < y_pos[1]:  # [x y]
            gram_x_subscripts = 'abcdpx,aBcdpX->xbXB'
            gram_y_subscripts = 'efgbqy,efgBqY->ybYB'
            recover_x_subscripts = 'abcdpx,bxsu->ascdpu'
            recover_y_subscripts = 'efgbqy,bysv->efgsqv'
        elif x_pos[1] > y_pos[1]:  # [y x]
            gram_x_subscripts = 'abcdpx,abcDpX->xdXD'
            gram_y_subscripts = 'edghqy,eDghqY->ydYD'
            recover_x_subscripts = 'abcdpx,dxsu->abcspu'
            recover_y_subscripts = 'edghqy,dysv->esghqv'
        else:
            assert False
    else:
        if x_pos[0] < y_pos[0]:  # [x y]^T
            gram_x_subscripts = 'abcdxp,abCdXp->xcXC'
            gram_y_subscripts = 'cfghyq,CfghYq->ycYC'
            recover_x_subscripts = 'abcdxp,cxsu->absdup'
            recover_y_subscripts = 'cfghyq,cysv->sfghvq'
        elif x_pos[0] > y_pos[0]:  # [y x]^T
            gram_x_subscripts = 'abcdxp,AbcdXp->xaXA'
            gram_y_subscripts = 'efahyq,efAhYq->yaYA'
            recover_x_subscripts = 'abcdxp,axsu->sbcdup'
            recover_y_subscripts = 'efahyq,aysv->efshvq'
        elif x_pos[1] < y_pos[1]:  # [x y]
            gram_x_subscripts = 'abcdxp,aBcdXp->xbXB'
            gram_y_subscripts = 'efgbyq,efgBYq->ybYB'
            recover_x_subscripts = 'abcdxp,bxsu->ascdup'
            recover_y_subscripts = 'efgbyq,bysv->efgsvq'
        elif x_pos[1] > y_pos[1]:  # [y x]
            gram_x_subscripts = 'abcdxp,abcDXp->xdXD'
            gram_y_subscripts = 'edghyq,eDghYq->ydYD'
            recover_x_subscripts = 'abcdxp,dxsu->abcsup'
            recover_y_subscripts = 'edghyq,dysv->esghvq'
        else:
            assert False

    numpy_backend = tensorbackends.get('numpy')

    def gram_qr_local(backend, a, gram_a_subscripts):
        gram_a = backend.einsum(gram_a_subscripts, a.conj(), a)
        d, xi = gram_a.shape[:2]

        # local
        gram_a = gram_a.numpy().reshape(d * xi, d * xi)
        w, v = la.eigh(gram_a, overwrite_a=True)
        s = np.clip(w, 0, None)**0.5
        s_pinv = np.divide(1, s, out=np.zeros_like(s), where=s != 0)
        r = np.einsum('j,ij->ji', s, v.conj()).reshape(d * xi, d, xi)
        r_inv = np.einsum('j,ij->ij', s_pinv, v).reshape(d, xi, d * xi)
        return numpy_backend.tensor(r), numpy_backend.tensor(r_inv)

    xr, xr_inv = gram_qr_local(state.backend, x, gram_x_subscripts)
    yr, yr_inv = gram_qr_local(state.backend, y, gram_y_subscripts)

    operator = numpy_backend.tensor(
        operator if isinstance(operator, np.ndarray) else operator.numpy())
    u, s, v = numpy_backend.einsumsvd('ixk,jyk,uvxy->isu,jsv',
                                      xr,
                                      yr,
                                      operator,
                                      option=ReducedSVD(rank))
    s **= 0.5
    u = numpy_backend.einsum('xki,isu,s->kxsu', xr_inv, u, s)
    v = numpy_backend.einsum('ykj,jsv,s->kysv', yr_inv, v, s)

    u = state.backend.astensor(u)
    v = state.backend.astensor(v)
    state.grid[x_pos] = state.backend.einsum(recover_x_subscripts, x, u)
    state.grid[y_pos] = state.backend.einsum(recover_y_subscripts, y, v)
Example #18
def apply_local_pair_operator_local_gram_qr(state,
                                            operator,
                                            positions,
                                            rank,
                                            flip=False):
    assert len(positions) == 2
    x_pos, y_pos = positions
    x, y = state.grid[x_pos], state.grid[y_pos]
    operator = state.backend.astensor(operator)

    if flip:
        if x_pos[0] < y_pos[0]:  # [x y]^T
            gram_x_subscripts = 'abcdpx,abCdpX->xcXC'
            gram_y_subscripts = 'cfghqy,CfghqY->ycYC'
            xq_subscripts = 'abcdpx,xci->abdpi'
            yq_subscripts = 'cfghqy,ycj->fghqj'
            recover_x_subscripts = 'abdpi,isu,s->absdpu'
            recover_y_subscripts = 'fghqj,jsv,s->sfghqv'
        elif x_pos[0] > y_pos[0]:  # [y x]^T
            gram_x_subscripts = 'abcdpx,AbcdpX->xaXA'
            gram_y_subscripts = 'efahpy,efAhpY->yaYA'
            xq_subscripts = 'abcdpx,xai->bcdpi'
            yq_subscripts = 'efahqy,yaj->efhqj'
            recover_x_subscripts = 'bcdpi,isu,s->sbcdpu'
            recover_y_subscripts = 'efhqj,jsv,s->efshqv'
        elif x_pos[1] < y_pos[1]:  # [x y]
            gram_x_subscripts = 'abcdpx,aBcdpX->xbXB'
            gram_y_subscripts = 'efgbqy,efgBqY->ybYB'
            xq_subscripts = 'abcdpx,xbi->acdpi'
            yq_subscripts = 'efgbqy,ybj->efgqj'
            recover_x_subscripts = 'acdpi,isu,s->ascdpu'
            recover_y_subscripts = 'efgqj,jsv,s->efgsqv'
        elif x_pos[1] > y_pos[1]:  # [y x]
            gram_x_subscripts = 'abcdpx,abcDpX->xdXD'
            gram_y_subscripts = 'edghqy,eDghqY->ydYD'
            xq_subscripts = 'abcdpx,xdi->abcpi'
            yq_subscripts = 'edghqy,ydj->eghqj'
            recover_x_subscripts = 'abcpi,isu,s->abcspu'
            recover_y_subscripts = 'eghqj,jsv,s->esghqv'
        else:
            assert False
    else:
        if x_pos[0] < y_pos[0]:  # [x y]^T
            gram_x_subscripts = 'abcdxp,abCdXp->xcXC'
            gram_y_subscripts = 'cfghyq,CfghYq->ycYC'
            xq_subscripts = 'abcdxp,xci->abdpi'
            yq_subscripts = 'cfghyq,ycj->fghqj'
            recover_x_subscripts = 'abdpi,isu,s->absdup'
            recover_y_subscripts = 'fghqj,jsv,s->sfghvq'
        elif x_pos[0] > y_pos[0]:  # [y x]^T
            gram_x_subscripts = 'abcdxp,AbcdXp->xaXA'
            gram_y_subscripts = 'efahyq,efAhYq->yaYA'
            xq_subscripts = 'abcdxp,xai->bcdpi'
            yq_subscripts = 'efahyq,yaj->efhqj'
            recover_x_subscripts = 'bcdpi,isu,s->sbcdup'
            recover_y_subscripts = 'efhqj,jsv,s->efshvq'
        elif x_pos[1] < y_pos[1]:  # [x y]
            gram_x_subscripts = 'abcdxp,aBcdXp->xbXB'
            gram_y_subscripts = 'efgbyq,efgBYq->ybYB'
            xq_subscripts = 'abcdxp,xbi->acdpi'
            yq_subscripts = 'efgbyq,ybj->efgqj'
            recover_x_subscripts = 'acdpi,isu,s->ascdup'
            recover_y_subscripts = 'efgqj,jsv,s->efgsvq'
        elif x_pos[1] > y_pos[1]:  # [y x]
            gram_x_subscripts = 'abcdxp,abcDXp->xdXD'
            gram_y_subscripts = 'edghyq,eDghYq->ydYD'
            xq_subscripts = 'abcdxp,xdi->abcpi'
            yq_subscripts = 'edghyq,ydj->eghqj'
            recover_x_subscripts = 'abcpi,isu,s->abcsup'
            recover_y_subscripts = 'eghqj,jsv,s->esghvq'
        else:
            assert False

    def gram_qr_local(backend, a, gram_a_subscripts, q_subscripts):
        # Gram-based local QR: eigendecompose the Gram matrix of `a` to obtain R
        # and its pseudo-inverse, then form Q by contracting `a` with R^-1
        gram_a = backend.einsum(gram_a_subscripts, a.conj(), a)
        d, xi = gram_a.shape[:2]

        # local
        gram_a = gram_a.numpy().reshape(d * xi, d * xi)
        w, v = la.eigh(gram_a, overwrite_a=True)
        s = np.clip(w, 0, None)**0.5
        s_pinv = np.divide(1, s, out=np.zeros_like(s), where=s != 0)
        r = np.einsum('j,ij->ji', s, v.conj()).reshape(d * xi, d, xi)
        r_inv = np.einsum('j,ij->ij', s_pinv, v).reshape(d, xi, d * xi)

        r = backend.astensor(r)
        r_inv = backend.astensor(r_inv)
        q = backend.einsum(q_subscripts, a, r_inv)
        return q, r

    xq, xr = gram_qr_local(state.backend, x, gram_x_subscripts, xq_subscripts)
    yq, yr = gram_qr_local(state.backend, y, gram_y_subscripts, yq_subscripts)

    u, s, v = state.backend.einsumsvd('ixk,jyk,uvxy->isu,jsv',
                                      xr,
                                      yr,
                                      operator,
                                      option=ReducedSVD(rank))
    s = s**0.5
    state.grid[x_pos] = state.backend.einsum(recover_x_subscripts, xq, u, s)
    state.grid[y_pos] = state.backend.einsum(recover_y_subscripts, yq, v, s)
Example #19
def apply_local_pair_operator_qr(state, operator, positions, rank, flip=False):
    assert len(positions) == 2
    svd_option = ReducedSVD(rank)
    x_pos, y_pos = positions
    x, y = state.grid[x_pos], state.grid[y_pos]
    operator = state.backend.astensor(operator)

    if flip:
        if x_pos[0] < y_pos[0]:  # [x y]^T
            split_x_subscripts = 'abcdpx->abdpi,icx'
            split_y_subscripts = 'cfghqy->fghqj,jcy'
            recover_x_subscripts = 'abdpi,isu,s->absdpu'
            recover_y_subscripts = 'fghqj,jsv,s->sfghqv'
        elif x_pos[0] > y_pos[0]:  # [y x]^T
            split_x_subscripts = 'abcdpx->bcdpi,iax'
            split_y_subscripts = 'efahqy->efhqj,jay'
            recover_x_subscripts = 'bcdpi,isu,s->sbcdpu'
            recover_y_subscripts = 'efhqj,jsv,s->efshqv'
        elif x_pos[1] < y_pos[1]:  # [x y]
            split_x_subscripts = 'abcdpx->acdpi,ibx'
            split_y_subscripts = 'efgbqy->efgqj,jby'
            recover_x_subscripts = 'acdpi,isu,s->ascdpu'
            recover_y_subscripts = 'efgqj,jsv,s->efgsqv'
        elif x_pos[1] > y_pos[1]:  # [y x]
            split_x_subscripts = 'abcdpx->abcpi,idx'
            split_y_subscripts = 'edghqy->eghqj,jdy'
            recover_x_subscripts = 'abcpi,isu,s->abcspu'
            recover_y_subscripts = 'eghqj,jsv,s->esghqv'
        else:
            assert False
    else:
        if x_pos[0] < y_pos[0]:  # [x y]^T
            split_x_subscripts = 'abcdxp->abdpi,icx'
            split_y_subscripts = 'cfghyq->fghqj,jcy'
            recover_x_subscripts = 'abdpi,isu,s->absdup'
            recover_y_subscripts = 'fghqj,jsv,s->sfghvq'
        elif x_pos[0] > y_pos[0]:  # [y x]^T
            split_x_subscripts = 'abcdxp->bcdpi,iax'
            split_y_subscripts = 'efahyq->efhqj,jay'
            recover_x_subscripts = 'bcdpi,isu,s->sbcdup'
            recover_y_subscripts = 'efhqj,jsv,s->efshvq'
        elif x_pos[1] < y_pos[1]:  # [x y]
            split_x_subscripts = 'abcdxp->acdpi,ibx'
            split_y_subscripts = 'efgbyq->efgqj,jby'
            recover_x_subscripts = 'acdpi,isu,s->ascdup'
            recover_y_subscripts = 'efgqj,jsv,s->efgsvq'
        elif x_pos[1] > y_pos[1]:  # [y x]
            split_x_subscripts = 'abcdxp->abcpi,idx'
            split_y_subscripts = 'edghyq->eghqj,jdy'
            recover_x_subscripts = 'abcpi,isu,s->abcsup'
            recover_y_subscripts = 'eghqj,jsv,s->esghvq'
        else:
            assert False

    xq, xr = state.backend.einqr(split_x_subscripts, x)
    yq, yr = state.backend.einqr(split_y_subscripts, y)

    # contract the two-site gate with the small R factors and truncate the new bond via SVD
    u, s, v = state.backend.einsumsvd('ikx,jky,uvxy->isu,jsv',
                                      xr,
                                      yr,
                                      operator,
                                      option=svd_option)
    s = s**0.5
    state.grid[x_pos] = state.backend.einsum(recover_x_subscripts, xq, u, s)
    state.grid[y_pos] = state.backend.einsum(recover_y_subscripts, yq, v, s)