Example #1
    def toMatrix(self, rows=None, cols=None):
        """
        Make a matrix of the data of this tensor, given the labels or legs of rows and cols.

        Parameters
        ----------
        rows : None or list of str or list of Leg
            The legs for the rows of the matrix. If None, deduced from cols.
        cols : None or list of str or list of Leg
            The legs for the cols of the matrix. If None, deduced from rows.

        Returns
        -------
        2D ndarray of float
            The data of this tensor, in the form of (rows, cols).
        """
        # print(rows, cols)
        # print(self.labels)
        # input two set of legs
        assert (not self.tensorLikeFlag), funcs.errorMessage(
            'TensorLike cannot be converted to a matrix since it contains no data.',
            'Tensor.toMatrix')
        assert not (
            (rows is None) and (cols is None)
        ), "Error in Tensor.toMatrix: toMatrix must have at least row or col exist."
        if (rows is not None) and (isinstance(rows[0], str)):
            rows = [self.getLeg(label) for label in rows]
        if (cols is not None) and (isinstance(cols[0], str)):
            cols = [self.getLeg(label) for label in cols]
        if (cols is None):
            cols = funcs.listDifference(self.legs, rows)
        if (rows is None):
            rows = funcs.listDifference(self.legs, cols)
        assert (
            funcs.compareLists(rows + cols, self.legs)
        ), "Error Tensor.toMatrix: rows + cols must contain(and only contain) all legs of tensor."

        colIndices = self.getLegIndices(cols)
        rowIndices = self.getLegIndices(rows)

        colShape = tuple([self.shape[x] for x in colIndices])
        rowShape = tuple([self.shape[x] for x in rowIndices])
        colTotalSize = funcs.tupleProduct(colShape)
        rowTotalSize = funcs.tupleProduct(rowShape)

        moveFrom = rowIndices + colIndices
        moveTo = list(range(len(moveFrom)))

        data = xplib.xp.moveaxis(xplib.xp.copy(self.a), moveFrom, moveTo)
        data = xplib.xp.reshape(data, (rowTotalSize, colTotalSize))
        return data
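A minimal usage sketch for Tensor.toMatrix (hypothetical labels and dimensions; Tensor is constructed as in Example #4, and numpy is assumed to be imported as np):

import numpy as np

# reshape a rank-3 tensor into a (rows, cols) matrix by naming the row and col legs
t = Tensor(shape=(2, 3, 4), labels=['i', 'j', 'k'], data=np.arange(24.0).reshape(2, 3, 4))
m = t.toMatrix(rows=['i'], cols=['j', 'k'])
print(m.shape)  # (2, 12): the 'i' leg becomes the rows, 'j' and 'k' are merged into the cols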
Example #2
 def getSize(tsA, tsB):
     '''
     Contract self.contractRes[tsA] and self.contractRes[tsB],
     then return the size of the resulting tensor.
     '''
     _, shape, _ = calculateContractRes(tsA, tsB)
     return funcs.tupleProduct(shape)
Example #3
    def toMatrix(self, rows, cols):
        """
        Deprecated

        Make a matrix of the data of this diagonal tensor, given the labels or legs of rows and cols. 

        Deprecated since this function is time consuming (O(n^d)), and in most cases there are much better ways to use the data than making a matrix. For details, see CTL.tensor.contract.

        Parameters
        ----------
        rows : None or list of str or list of Leg
            The legs for the rows of the matrix. If None, deduced from cols.
        cols : None or list of str or list of Leg
            The legs for the cols of the matrix. If None, deduced from rows.

        Returns
        -------
        2D ndarray of float
            The data of this tensor, in the form of (rows, cols).
        """
        assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike cannot be converted to a matrix since it contains no data.', 'DiagonalTensor.toMatrix')
        # print(rows, cols)
        # print(self.labels)
        # input two set of legs
        funcs.deprecatedFuncWarning(funcName = "DiagonalTensor.toMatrix", deprecateMessage = "Diagonal tensors should be used in a better way for linear algebra calculations rather than being made into a matrix.")
        assert not ((rows is None) and (cols is None)), "Error in Tensor.toMatrix: toMatrix must be given at least one of rows and cols."
        if (rows is not None) and (isinstance(rows[0], str)):
            rows = [self.getLeg(label) for label in rows]
        if (cols is not None) and (isinstance(cols[0], str)):
            cols = [self.getLeg(label) for label in cols]
        if (cols is None):
            cols = funcs.listDifference(self.legs, rows)
        if (rows is None):
            rows = funcs.listDifference(self.legs, cols)
        assert (funcs.compareLists(rows + cols, self.legs)), "Error in Tensor.toMatrix: rows + cols must contain (and only contain) all legs of the tensor."

        colIndices = self.getLegIndices(cols)
        rowIndices = self.getLegIndices(rows)

        colShape = tuple([self.shape[x] for x in colIndices])
        rowShape = tuple([self.shape[x] for x in rowIndices])
        colTotalSize = funcs.tupleProduct(colShape)
        rowTotalSize = funcs.tupleProduct(rowShape)

        data = funcs.diagonalNDTensor(self.a, self.dim)
        data = xplib.xp.reshape(data, (rowTotalSize, colTotalSize))
        return data
Example #4
    def test_TensorGraph(self):
        shapeA = (300, 4, 5)
        shapeB = (300, 6)
        shapeC = (4, 6, 5)
        a = Tensor(shape=shapeA,
                   labels=['a300', 'b4', 'c5'],
                   data=np.ones(shapeA))
        b = Tensor(shape=shapeB, labels=['a300', 'd6'], data=np.ones(shapeB))
        c = Tensor(shape=shapeC,
                   labels=['b4', 'd6', 'c5'],
                   data=np.ones(shapeC))

        makeLink(a.getLeg('a300'), b.getLeg('a300'))
        makeLink(a.getLeg('b4'), c.getLeg('b4'))
        makeLink(a.getLeg('c5'), c.getLeg('c5'))
        makeLink(b.getLeg('d6'), c.getLeg('d6'))

        tensorList = [a, b, c]

        tensorGraph = makeTensorGraph(tensorList)

        # with a typical dimension, contracting 0 and 2 first would be preferred,
        # but this is no longer true once the real bond dimension of 300 is taken into account

        seq = tensorGraph.optimalContractSequence(typicalDim=None)
        self.assertListEqual(seq, [(0, 1), (2, 0)])
        self.assertEqual(tensorGraph.optimalCostResult(), 36120)

        seq = tensorGraph.optimalContractSequence(typicalDim=None, bf=True)
        self.assertEqual(tensorGraph.optimalCostResult(), 36120)

        # res1 = contractWithSequence(tensorList, seq = seq)

        seq = tensorGraph.optimalContractSequence(typicalDim=10)
        self.assertListEqual(seq, [(0, 2), (1, 0)])
        self.assertEqual(tensorGraph.optimalCostResult(), 10100)

        seq = tensorGraph.optimalContractSequence(typicalDim=10, bf=True)
        self.assertEqual(tensorGraph.optimalCostResult(), 10100)

        res2 = contractWithSequence(tensorList, seq=seq)
        self.assertEqual(
            res2.a**2,
            funcs.tupleProduct(shapeA) * funcs.tupleProduct(shapeB) *
            funcs.tupleProduct(shapeC))
Example #5
    def checkShapeDataCompatible(self, shape, data):
        """
        Check whether data is compatible with shape. For more information, see the "Notes" section of the Tensor docstring.

        Parameters
        ----------
        shape : tuple of int
            The expected shape of the tensor.
        data : None or ndarray of float
            The data to be put into the tensor.
        
        Returns
        -------
        bool
            Whether the shape and data are compatible.
        """
        # we know shape, and want to see if data is ok
        if (data is None):
            return True
        return (funcs.tupleProduct(data.shape) == funcs.tupleProduct(shape))
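The check only compares total element counts, so any data that can be reshaped to the target shape passes. A small illustration with made-up values (numpy as np, funcs as used above):

shape = (2, 3)
data = np.ones(6)  # flat array of 6 elements
# tupleProduct(data.shape) == tupleProduct(shape) == 6, so this pair is considered compatible
compatible = funcs.tupleProduct(data.shape) == funcs.tupleProduct(shape)  # True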
Example #6
def contractCost(ta, tb):
    """
    The cost of contraction of two tensors.

    Parameters
    ----------
    ta, tb : Tensor
        Two tensors we want to contract.

    Returns
    -------
    cost : int
        The exact cost of contracting the two tensors (e.g. for two matrices of shapes (A, B) and (B, C), the cost is A * B * C; for diagonal tensors, however, the cost is just the size of the output tensor).
    costLevel : int
        The order of the cost (i.e. how many dimensions are involved). This is used when we want to decide the order of our calculation for a given bond dimension chi.
    """
    diagonalA, diagonalB = ta.diagonalFlag, tb.diagonalFlag
    if (diagonalA and diagonalB):
        return ta.bondDimension(), 1

    diagonal = diagonalA or diagonalB

    bonds = shareBonds(ta, tb)
    intersectionShape = tuple([bond.legs[0].dim for bond in bonds])
    if (not diagonal):
        cost = funcs.tupleProduct(ta.shape) * funcs.tupleProduct(
            tb.shape) // funcs.tupleProduct(intersectionShape)
        costLevel = len(ta.shape) + len(tb.shape) - len(intersectionShape)
    else:
        cost = funcs.tupleProduct(ta.shape) * funcs.tupleProduct(
            tb.shape) // (funcs.tupleProduct(intersectionShape)**2)
        costLevel = len(ta.shape) + len(tb.shape) - 2 * len(intersectionShape)
    return cost, costLevel
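As a worked example with the tensors from Example #4: contracting a (shape (300, 4, 5)) with b (shape (300, 6)) over the shared bond of dimension 300 costs (300 * 4 * 5) * (300 * 6) / 300 = 36000, with costLevel 3 + 2 - 1 = 4; contracting the resulting (4, 5, 6) tensor with c (shape (4, 6, 5)) then costs 120 * 120 / 120 = 120, which adds up to the total cost of 36120 asserted in that test.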
Example #7
 def totalSize(self):
     return funcs.tupleProduct(self.shape)
Example #8
def SchimdtDecomposition(ta,
                         tb,
                         chi,
                         squareRootSeparation=False,
                         swapLabels=([], []),
                         singularValueEps=1e-10):
    '''
    Schmidt decomposition between tensors ta and tb;
    returns ta, s, tb.
    ta should be in canonical form, that is, a a^dagger = I.
    To do this, first contract ta and tb while keeping track of which legs come from a and which from b,
    then apply SVD to the resulting matrix and keep the required chi singular values:
    the first chi singular vectors go to a and b, and a diagonal tensor holds the singular values.

    If squareRootSeparation is True: divide s into two square-root diagonal tensors,
    contract one into ta and one into tb, and return ta, None, tb.

    If swapLabels is not ([], []): swap the two sets of labels in the output, so the two tensors swap their locations on the MPS.
    e.g. t[i], t[i + 1] = SchimdtDecomposition(t[i], t[i + 1], chi = chi, squareRootSeparation = True, swapLabels = (['o'], ['o']))
    swaps the two tensors t[i] & t[i + 1], which both have an "o" leg connected to the outside,
    while the other legs (e.g. internal legs in the MPS, usually 'l' and 'r') are not affected.
    '''

    funcName = 'CTL.examples.Schimdt.SchimdtDecomposition'
    sb = shareBonds(ta, tb)
    assert (len(sb) > 0), funcs.errorMessage(
        "Schimdt Decomposition cannot accept two tensors without common bonds, {} and {} gotten."
        .format(ta, tb),
        location=funcName)
    assert (ta.tensorLikeFlag == tb.tensorLikeFlag), funcs.errorMessage(
        "Schimdt Decomposition must havge two objects being either Tensor or TensorLike simultaneously, but {} and {} obtained."
        .format(ta, tb),
        location=funcName)

    TLFlag = ta.tensorLikeFlag
    sbDim = funcs.tupleProduct(tuple([bond.legs[0].dim for bond in sb]))

    sharedLabelA = sb[0].sideLeg(ta).name
    sharedLabelB = sb[0].sideLeg(tb).name
    # if (sharedLabelA.startswith('a-')):
    #     raise ValueError(funcs.errorMessage(err = "shared label {} of tensor A starts with 'a-'.".format(sharedLabelA), location = funcName))
    # if (sharedLabelB.startswith('b-')):
    #     raise ValueError(funcs.errorMessage(err = "shared label {} of tensor B starts with 'b-'.".format(sharedLabelB), location = funcName))

    # assert (ta.xp == tb.xp), funcs.errorMessage("Schimdt Decomposition cannot accept two tensors with different xp: {} and {} gotten.".format(ta.xp, tb.xp), location = funcName)

    assert (len(swapLabels[0]) == len(swapLabels[1])), funcs.errorMessage(
        err="invalid swap labels {}.".format(swapLabels), location=funcName)
    assert ta.labelsInTensor(swapLabels[0]), funcs.errorMessage(
        err="{} not in tensor {}.".format(swapLabels[0], ta),
        location=funcName)
    assert tb.labelsInTensor(swapLabels[1]), funcs.errorMessage(
        err="{} not in tensor {}.".format(swapLabels[1], tb),
        location=funcName)

    ta.addTensorTag('a')
    tb.addTensorTag('b')
    for swapLabel in swapLabels[0]:
        ta.renameLabel('a-' + swapLabel, 'b-' + swapLabel)
    for swapLabel in swapLabels[1]:
        tb.renameLabel('b-' + swapLabel, 'a-' + swapLabel)

    tot = contractTwoTensors(ta, tb)

    legA = [leg for leg in tot.legs if leg.name.startswith('a-')]
    legB = [leg for leg in tot.legs if leg.name.startswith('b-')]

    labelA = [leg.name for leg in legA]
    labelB = [leg.name for leg in legB]
    # not remove a- and b- here, since we need to add an internal leg, and we need to distinguish it from others

    shapeA = tuple([leg.dim for leg in legA])
    shapeB = tuple([leg.dim for leg in legB])

    totShapeA = funcs.tupleProduct(shapeA)
    totShapeB = funcs.tupleProduct(shapeB)

    if (TLFlag):
        u = None
        vh = None
        s = None
        chi = min([chi, totShapeA, totShapeB, sbDim])
    else:
        mat = tot.toMatrix(rows=labelA, cols=labelB)

        # np = ta.xp # default numpy

        u, s, vh = xplib.xp.linalg.svd(mat)

        chi = min([
            chi, totShapeA, totShapeB,
            funcs.nonZeroElementN(s, singularValueEps)
        ])
        u = u[:, :chi]
        s = s[:chi]
        vh = vh[:chi]

    if (squareRootSeparation):
        if (TLFlag):
            uS = None
            vS = None
        else:
            sqrtS = xplib.xp.sqrt(s)
            uS = funcs.rightDiagonalProduct(u, sqrtS)
            vS = funcs.leftDiagonalProduct(vh, sqrtS)

        outLegForU = Leg(None, chi, name=sharedLabelA)
        # inLegForU = Leg(None, chi, name = sharedLabelB)
        # internalLegForS1 = Leg(None, chi, name = 'o')
        # internalLegForS2 = Leg(None, chi, name = 'o')
        # inLegForV = Leg(None, chi, name = sharedLabelA)
        outLegForV = Leg(None, chi, name=sharedLabelB)

        uTensor = Tensor(data=uS,
                         legs=legA + [outLegForU],
                         shape=shapeA + (chi, ),
                         tensorLikeFlag=TLFlag)
        # s1Tensor = DiagonalTensor(data = xplib.xp.sqrt(s), legs = [inLegForU, internalLegForS1], shape = (chi, chi))
        # s2Tensor = DiagonalTensor(data = xplib.xp.sqrt(s), legs = [internalLegForS2, inLegForV], shape = (chi, chi))
        vTensor = Tensor(data=vS,
                         legs=[outLegForV] + legB,
                         shape=(chi, ) + shapeB,
                         tensorLikeFlag=TLFlag)

        # legs should be automatically set by Tensor / DiagonalTensor, so no need for setTensor

        # outLegForU.setTensor(uTensor)
        # outLegForV.setTensor(vTensor)

        # inLegForU.setTensor(sTensor)
        # inLegForV.setTensor(sTensor)

        # remove a- and b-
        for leg in legA:
            if (leg.name.startswith('a-')):
                leg.name = leg.name[2:]

        for leg in legB:
            if (leg.name.startswith('b-')):
                leg.name = leg.name[2:]

        makeLink(outLegForU, outLegForV)

        # makeLink(outLegForU, inLegForU)
        # makeLink(outLegForV, inLegForV)
        # makeLink(internalLegForS1, internalLegForS2)
        # uTensor = contractTwoTensors(uTensor, s1Tensor)
        # vTensor = contractTwoTensors(vTensor, s2Tensor)
        return uTensor, None, vTensor

    outLegForU = Leg(None, chi, name=sharedLabelA)
    inLegForU = Leg(None, chi, name=sharedLabelB)
    inLegForV = Leg(None, chi, name=sharedLabelA)
    outLegForV = Leg(None, chi, name=sharedLabelB)

    uTensor = Tensor(data=u,
                     legs=legA + [outLegForU],
                     shape=shapeA + (chi, ),
                     tensorLikeFlag=TLFlag)
    sTensor = DiagonalTensor(data=s,
                             legs=[inLegForU, inLegForV],
                             shape=(chi, chi),
                             tensorLikeFlag=TLFlag)
    vTensor = Tensor(data=vh,
                     legs=[outLegForV] + legB,
                     shape=(chi, ) + shapeB,
                     tensorLikeFlag=TLFlag)

    # legs should be automatically set by Tensor / DiagonalTensor, so no need for setTensor

    # outLegForU.setTensor(uTensor)
    # outLegForV.setTensor(vTensor)

    # inLegForU.setTensor(sTensor)
    # inLegForV.setTensor(sTensor)

    # remove a- and b-
    for leg in legA:
        if (leg.name.startswith('a-')):
            leg.name = leg.name[2:]

    for leg in legB:
        if (leg.name.startswith('b-')):
            leg.name = leg.name[2:]

    makeLink(outLegForU, inLegForU)
    makeLink(outLegForV, inLegForV)

    return uTensor, sTensor, vTensor
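A minimal usage sketch (hypothetical labels, dimensions and data; Tensor and makeLink are used as in Example #4, numpy as np; in an actual MPS sweep ta would already be in canonical form):

import numpy as np

ta = Tensor(shape=(2, 4), labels=['left', 'mid'], data=np.random.rand(2, 4))
tb = Tensor(shape=(4, 3), labels=['mid', 'right'], data=np.random.rand(4, 3))
makeLink(ta.getLeg('mid'), tb.getLeg('mid'))

# keep at most chi = 2 singular values on the shared bond
u, s, v = SchimdtDecomposition(ta, tb, chi=2)
# u keeps the 'left' leg, v keeps the 'right' leg, and s is a DiagonalTensor on the new bond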
Example #9
 def test_tupleProduct(self):
     self.assertEqual(funcs.tupleProduct((2, 3)), 6)
Example #10
 def checkShapeDataCompatible(self, shape, data):
     """
     For more information, see Tensor.checkShapeDataCompatible.
     """
     # we know shape, and want to see if data is ok
     if (data is None):
         return True 
     if (isinstance(shape, int)):
         shape = tuple([shape] * len(data.shape))
     return ((len(data.shape) == 1) and (len(shape) > 0) and (len(data) == shape[0])) or (
         funcs.tupleProduct(data.shape) == funcs.tupleProduct(shape))
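A small illustration of the two accepted forms for a diagonal tensor (made-up values, numpy as np): for shape = (3, 3), either the full data or just the 1D diagonal passes the check.

full = np.ones((3, 3))  # tupleProduct((3, 3)) == 9 == tupleProduct(shape), so compatible
diag = np.ones(3)       # 1D data with len(diag) == shape[0] == 3: only the diagonal is stored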