def outProduct(self, legList, newLabel):
    """
    Perform an outer product to combine a set of legs into a single leg.

    Parameters
    ----------
    legList : list of Leg or list of str
        The legs to be combined, or the labels of those legs.
    newLabel : str
        The new label for the combined leg.

    Returns
    -------
    Leg
        The new leg combining all the given legs.
    """
    assert (isinstance(legList, list) and (len(legList) > 0)), funcs.errorMessage(
        err="outProduct cannot work on leg list of zero legs or non-list, {} obtained.".format(legList),
        location='Tensor.outProduct')
    if isinstance(legList[0], str):
        return self.outProduct([self.getLeg(label) for label in legList], newLabel)

    # connectedLegs = [leg for leg in legList if (leg.bond is not None)]
    # if (len(connectedLegs) > 0):
    #     warnings.warn(funcs.warningMessage(warn = "out producting legs {} that has been connected: remove the connection.".format(connectedLegs), location = 'Tensor.outProduct'))

    self.moveLegsToFront(legList)
    n = len(legList)
    newShape = (-1, ) + self.shape[n:]

    if self.tensorLikeFlag:
        newDim = 1
        for leg in self.legs[:n]:
            newDim *= leg.dim
    else:
        self.a = xplib.xp.reshape(self.a, newShape)
        newDim = self.a.shape[0]

    self.legs = [Leg(self, newDim, newLabel)] + self.legs[n:]
    # return the new leg for wider usage
    return self.legs[0]
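
# Usage sketch for outProduct (illustration only, not part of the original source).
# The import path below is an assumption and may differ from the real package layout.
import numpy as np
from CTL.tensor.tensor import Tensor  # assumed path

t = Tensor(data=np.random.rand(2, 3, 4), labels=['a', 'b', 'c'])
newLeg = t.outProduct(['a', 'b'], 'ab')   # combine legs 'a' and 'b' into a single leg 'ab'
print(newLeg.name, newLeg.dim)            # expected: 'ab' 6 (2 * 3)
print(t.shape)                            # expected: (6, 4)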
def merge(ta, tb, chi=None, bondName=None, renameWarning=True):
    """
    Merge the shared bonds of two tensors. If they are not connected, raise a warning and do nothing.

    Parameters
    ----------
    ta, tb : Tensor

    chi : int, optional
        The upper bound of the bond dimension after merging. If None, no truncation is applied.
    bondName : str, optional
        The name of the bond after merging. If None, then for a list of [name1, name2, ..., nameN],
        the name will be "{name1}|{name2}|...|{nameN}".
    renameWarning : bool, default True
        If True, raise a warning in degenerate cases, e.g. when the two tensors share no bond at all
        (so nothing is merged), or when only one bond is shared and merging amounts to a rename.

    Returns
    -------
    ta, tb : Tensor
        The two tensors after merging all the common bonds into one bond
        (truncated to dimension at most chi if chi is given).
    """
    funcName = "CTL.tensor.contract.contract.truncate"
    # assert (ta.xp == tb.xp), funcs.errorMessage("Truncation cannot accept two tensors with different xp: {} and {} gotten.".format(ta.xp, tb.xp), location = funcName)
    assert (ta.tensorLikeFlag == tb.tensorLikeFlag), funcs.errorMessage(
        'two tensors to be merged must be either Tensor or TensorLike simultaneously, {} and {} obtained.'.format(ta, tb),
        location=funcName)
    tensorLikeFlag = ta.tensorLikeFlag

    # xp = ta.xp
    ta, tb = mergeLink(ta, tb, bondName=bondName, renameWarning=renameWarning)
    if chi is None:
        # no need for truncation
        return ta, tb

    sb = shareBonds(ta, tb)
    # assert (len(sb) > 0), funcs.errorMessage("Truncation cannot work on two tensors without common bonds: {} and {} gotten.".format(ta, tb), location = funcName)

    # if (bondName is None):
    #     bondNameListA = [bond.sideLeg(ta).name for bond in sb]
    #     bondNameListB = [bond.sideLeg(tb).name for bond in sb]
    #     bondNameA = '|'.join(bondNameListA)
    #     bondNameB = '|'.join(bondNameListB)
    # elif (isinstance(bondName, str)):
    #     bondNameA = bondName
    #     bondNameB = bondName
    # else:
    #     bondNameA, bondNameB = bondName # tuple/list

    # if (renameFlag):
    if len(sb) == 0:
        if renameWarning:
            warnings.warn(
                funcs.warningMessage(
                    warn='mergeLink cannot merge links between two tensors {} and {} not sharing any bond'.format(ta, tb),
                    location=funcName),
                RuntimeWarning)
        return ta, tb

    assert (len(sb) == 1), funcs.errorMessage(
        "There should only be one common leg between ta and tb after mergeLink, {} obtained.".format(sb),
        location=funcName)

    legA = [bond.sideLeg(ta) for bond in sb]
    legB = [bond.sideLeg(tb) for bond in sb]

    bondNameA = legA[0].name
    bondNameB = legB[0].name

    remainLegA = ta.complementLegs(legA)
    remainLegB = tb.complementLegs(legB)

    if not tensorLikeFlag:
        matA = ta.toMatrix(rows=None, cols=legA)
        matB = tb.toMatrix(rows=legB, cols=None)

        mat = matA @ matB

        u, s, vh = xplib.xp.linalg.svd(mat)

        chi = min([chi, funcs.nonZeroElementN(s), matA.shape[0], matB.shape[1]])
        u = u[:, :chi]
        s = s[:chi]
        vh = vh[:chi]

        uOutLeg = Leg(tensor=None, dim=chi, name=bondNameA)
        vOutLeg = Leg(tensor=None, dim=chi, name=bondNameB)
        # print(legA, legB)

        sqrtS = xplib.xp.sqrt(s)
        uS = funcs.rightDiagonalProduct(u, sqrtS)
        vS = funcs.leftDiagonalProduct(vh, sqrtS)

        uTensor = Tensor(data=uS, legs=remainLegA + [uOutLeg])
        vTensor = Tensor(data=vS, legs=[vOutLeg] + remainLegB)
    else:
        chi = min([chi, legA[0].dim, ta.totalSize // legA[0].dim, tb.totalSize // legB[0].dim])
        uOutLeg = Leg(tensor=None, dim=chi, name=bondNameA)
        vOutLeg = Leg(tensor=None, dim=chi, name=bondNameB)
        uTensor = Tensor(tensorLikeFlag=True, legs=remainLegA + [uOutLeg])
        vTensor = Tensor(tensorLikeFlag=True, legs=[vOutLeg] + remainLegB)

    makeLink(uOutLeg, vOutLeg)
    return uTensor, vTensor
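
# Usage sketch for merge (illustration only; the import paths are assumptions).
import numpy as np
from CTL.tensor.tensor import Tensor            # assumed path
from CTL.tensor.contract.link import makeLink   # assumed path

ta = Tensor(data=np.random.rand(3, 4, 5), labels=['x', 'y', 'p'])
tb = Tensor(data=np.random.rand(3, 4, 6), labels=['x', 'y', 'q'])
# connect the two pairs of legs that should become a single merged bond
makeLink(ta.getLeg('x'), tb.getLeg('x'))
makeLink(ta.getLeg('y'), tb.getLeg('y'))

ta, tb = merge(ta, tb, chi=8)   # the two shared bonds become one bond of dimension <= 8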
def createMPSFromTensor(tensor, chi=16):
    '''
    tensor is a real Tensor with n outer legs.
    Transfer it into an MPS with Schmidt decomposition;
    after this, we can manage the tensor network decomposition by MPS network decomposition.
    Finally, consider the case where tensor is only a TensorLike object.
    '''
    # TODO: make this function work for tensorLike

    funcName = 'CTL.examples.MPS.createMPSFromTensor'
    legs = [leg for leg in tensor.legs]
    # xp = tensor.xp
    n = len(legs)
    assert (n > 0), funcs.errorMessage(
        "cannot create MPS from 0-D tensor {}.".format(tensor), location=funcName)
    if n == 1:
        warnings.warn(
            funcs.warningMessage("creating MPS for 1-D tensor {}.".format(tensor), location=funcName),
            RuntimeWarning)
        return FreeBoundaryMPS([tensor], chi=chi)

    a = xplib.xp.ravel(tensor.toTensor(labels=None))

    lastDim = -1
    tensors = []
    lastRightLeg = None
    for i in range(n - 1):
        u, v = matrixSchimdtDecomposition(a, dim=legs[i].dim, chi=chi)
        leg = legs[i]
        if i == 0:
            dim1 = u.shape[1]
            rightLeg = Leg(None, dim=dim1, name='r')
            tensor = Tensor(shape=(leg.dim, u.shape[1]), legs=[leg, rightLeg], data=u)
            lastRightLeg = rightLeg
            lastDim = dim1
        else:
            dim1 = u.shape[-1]
            leftLeg = Leg(None, dim=lastDim, name='l')
            rightLeg = Leg(None, dim=dim1, name='r')
            tensor = Tensor(shape=(lastDim, leg.dim, u.shape[-1]), legs=[leftLeg, leg, rightLeg], data=u)
            makeLink(leftLeg, lastRightLeg)
            lastRightLeg = rightLeg
            lastDim = dim1
        tensors.append(tensor)
        a = v

    leftLeg = Leg(None, dim=lastDim, name='l')
    tensor = Tensor(shape=(lastDim, legs[-1].dim), legs=[leftLeg, legs[-1]], data=a)
    makeLink(leftLeg, lastRightLeg)
    tensors.append(tensor)
    # print(tensors)
    return FreeBoundaryMPS(tensorList=tensors, chi=chi)
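
# Usage sketch for createMPSFromTensor (illustration only; the Tensor import path is an assumption).
import numpy as np
from CTL.tensor.tensor import Tensor  # assumed path

t = Tensor(data=np.random.rand(2, 3, 2, 3), labels=['a', 'b', 'c', 'd'])
mps = createMPSFromTensor(t, chi=8)
# mps is a FreeBoundaryMPS of 4 site tensors; neighboring sites are linked via 'l'/'r' legs,
# and each site keeps one of the original outer legs 'a', 'b', 'c', 'd'.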
def test_deduce(self):
    self.showTestCaseBegin("diagonal tensor shape deduction")

    legA = Leg(tensor = None, dim = 5, name = 'a')
    legB = Leg(tensor = None, dim = 5, name = 'b')
    legBError = Leg(tensor = None, dim = 6, name = 'b')

    a = DiagonalTensor(legs = [legA, legB])
    self.assertTupleEqual(a.shape, (5, 5))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(funcs.floatArrayEqual(a.a, np.ones(5)))

    a = DiagonalTensor(legs = [legA, legB], labels = ['a', 'b'])
    self.assertTupleEqual(a.shape, (5, 5))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(funcs.floatArrayEqual(a.a, np.ones(5)))

    a = DiagonalTensor(legs = [legA, legB], shape = 5)
    self.assertTupleEqual(a.shape, (5, 5))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(funcs.floatArrayEqual(a.a, np.ones(5)))

    a = DiagonalTensor(legs = [legA, legB], shape = 5, data = np.zeros(5))
    self.assertTupleEqual(a.shape, (5, 5))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(funcs.floatArrayEqual(a.a, np.zeros(5)))

    def legDimNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legBError])
    def labelsSizeNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legB], labels = ['a'])
    def labelsOrderNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legB], labels = ['b', 'a'])
    def shapeSizeNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legB], shape = (5, 6, 7))
    def shapeOrderNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legB], shape = (6, 5))
    def dataDimNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legB], data = np.zeros((5, 6, 7)))
    def dataShapeNotEqualFunc():
        _ = DiagonalTensor(legs = [legA, legB], data = np.zeros((5, 6)))
    def labelsShapeNotCompatibleFunc():
        _ = DiagonalTensor(legs = [legA, legB], labels = ['a'], data = np.zeros((5, 5)))
    def dimensionless1DDataErrorFunc():
        _ = DiagonalTensor(legs = [], labels = [], data = np.zeros(3))
    def dimensionless1DDataErrorFunc2():
        _ = DiagonalTensor(labels = [], data = np.zeros(3))

    self.assertRaises(ValueError, legDimNotEqualFunc)
    self.assertRaises(ValueError, labelsSizeNotEqualFunc)
    self.assertRaises(ValueError, labelsOrderNotEqualFunc)
    self.assertRaises(ValueError, shapeSizeNotEqualFunc)
    self.assertRaises(ValueError, shapeOrderNotEqualFunc)
    self.assertRaises(ValueError, dataDimNotEqualFunc)
    self.assertRaises(ValueError, dataShapeNotEqualFunc)
    self.assertRaises(ValueError, labelsShapeNotCompatibleFunc)
    self.assertRaises(ValueError, dimensionless1DDataErrorFunc)
    self.assertRaises(ValueError, dimensionless1DDataErrorFunc2)

    a = DiagonalTensor(shape = (2, 2), labels = ['a1', 'a2'])
    self.assertTupleEqual(a.shape, (2, 2))
    self.assertEqual(a.dim, 2)

    a = DiagonalTensor(shape = (2, 2))
    self.assertTupleEqual(a.shape, (2, 2))
    self.assertEqual(a.dim, 2)
    self.assertTrue((a.a == np.ones(2)).all()) # default as identity tensor

    def shapeNotEqualFunc():
        _ = DiagonalTensor(shape = (2, 3))
    self.assertRaises(ValueError, shapeNotEqualFunc)

    def labelsShortFunc():
        _ = DiagonalTensor(shape = (2, 2), labels = ['a1'])
    def labelsLongFunc():
        _ = DiagonalTensor(shape = (2, 2), labels = ['a', 'b', 'c'])
    self.assertRaises(ValueError, labelsShortFunc)
    self.assertRaises(ValueError, labelsLongFunc)

    a = DiagonalTensor(shape = (2, 2), data = np.zeros((2, 2)))
    self.assertTupleEqual(a.shape, (2, 2))
    self.assertEqual(a.dim, 2)

    a = DiagonalTensor(shape = (2, 2, 2), data = np.zeros(2))
    self.assertTupleEqual(a.shape, (2, 2, 2))
    self.assertEqual(a.dim, 3)

    def dataDimErrorFunc():
        _ = DiagonalTensor(shape = (2, 2), data = np.zeros((2, 2, 2)))
    def dataShapeErrorFunc():
        _ = DiagonalTensor(shape = (2, 2), data = np.zeros((2, 3))) # no error in 9be9325, newly added
    self.assertRaises(ValueError, dataDimErrorFunc)
    self.assertRaises(ValueError, dataShapeErrorFunc)

    # now start (shape = None) tests
    a = DiagonalTensor(labels = ['a', 'b'], data = np.zeros(3))
    self.assertEqual(a._length, 3)
    self.assertEqual(a.shape, (3, 3))
    self.assertEqual(a.dim, 2)

    a = DiagonalTensor(labels = ['a', 'b'], data = np.zeros((4, 4)))
    self.assertEqual(a._length, 4)
    self.assertEqual(a.shape, (4, 4))
    self.assertEqual(a.dim, 2)

    a = DiagonalTensor(labels = ['a', 'b'], data = np.array([[1, 2], [3, 4]]))
    self.assertEqual(a._length, 2)
    self.assertEqual(a.shape, (2, 2))
    self.assertTrue((a.a == np.array([1, 4])).all())

    def dataDimErrorFunc2():
        _ = DiagonalTensor(labels = ['a', 'b', 'c'], data = np.zeros((2, 2)))
    def dataShapeErrorFunc2():
        _ = DiagonalTensor(labels = ['a', 'b', 'c'], data = np.zeros((2, 2, 3)))
    def dataNoneErrorFunc():
        _ = DiagonalTensor(labels = ['a', 'b', 'c'], data = None)
    self.assertRaises(ValueError, dataDimErrorFunc2)
    self.assertRaises(ValueError, dataShapeErrorFunc2)
    self.assertRaises(ValueError, dataNoneErrorFunc)

    # now start (shape = None, labels = None) tests
    a = DiagonalTensor(data = np.zeros(2)) # 1D diagonal tensor as a simple vector
    self.assertEqual(a.dim, 1)
    self.assertEqual(a._length, 2)
    self.assertListEqual(a.labels, ['a'])

    a = DiagonalTensor(data = np.array([[1, 2], [4, 3]]))
    self.assertEqual(a.dim, 2)
    self.assertEqual(a._length, 2)
    self.assertTrue((a.a == np.array([1, 3])).all())

    def dataShapeErrorFunc3():
        _ = DiagonalTensor(data = np.zeros((2, 2, 3)))
    def nothingErrorFunc():
        _ = DiagonalTensor()
    self.assertRaises(ValueError, dataShapeErrorFunc3)
    self.assertRaises(ValueError, nothingErrorFunc)

    self.showTestCaseEnd("diagonal tensor shape deduction")
def deduction(self, legs, shape, labels, data, isTensorLike=False):
    # print('deduction(legs = {}, shape = {}, labels = {}, data = {}, isTensorLike = {})'.format(legs, shape, labels, data, isTensorLike))
    """
    Deduce the legs, shape, labels and data from the user input.
    Guess the missing information if it is not provided.
    For details, check the "Notes" section of the comments for Tensor.

    Parameters
    ----------
    legs : None or list of Leg
        Legs of the tensor that already existed before creating the tensor.
        If None, then automatically generated.
    shape : None or tuple of int
        Expected shape for the tensor.
    labels : None or list of str
        The labels to be added to the legs of this tensor.
    data : None or ndarray of float
        The data to be put in the tensor, None for randomly generated.
    isTensorLike : bool, default False
        Whether we are working for a TensorLike object: if True, then data is None.

    Returns
    -------
    legs : list of Leg
    shape : tuple of int
    labels : list of str
        The labels to be added to the legs of this tensor.
    data : None or ndarray of float
        The data to be put in the tensor. None for the isTensorLike = True case.

    Notes
    -----
    Although each of the first 4 parameters can be None by default, the user must provide
    enough information for the deduction of the real shape, labels, legs and data (if not TensorLike).
    """
    funcName = "Tensor.deduction"

    if legs is not None:
        if not self.checkLegsLabelsCompatible(legs=legs, labels=labels):
            raise ValueError(funcs.errorMessage(
                'labels {} is not compatible with legs {}'.format(labels, legs),
                location=funcName))
        if labels is None:
            labels = [leg.name for leg in legs]

        if not self.checkLegsShapeCompatible(legs=legs, shape=shape):
            raise ValueError(funcs.errorMessage(
                'shape {} is not compatible with legs {}'.format(shape, legs),
                location=funcName))
        if shape is None:
            shape = tuple([leg.dim for leg in legs])

        if not self.checkShapeDataCompatible(shape=shape, data=data):
            raise ValueError(funcs.errorMessage(
                'data shape {} is not compatible with required shape {}'.format(data.shape, shape),
                location=funcName))

    elif shape is not None:
        if not self.checkShapeLabelsCompatible(shape=shape, labels=labels):
            raise ValueError(funcs.errorMessage(
                'labels {} is not compatible with required shape {}'.format(labels, shape),
                location=funcName))
        if labels is None:
            labels = self.generateLabels(len(shape))

        if not self.checkShapeDataCompatible(shape=shape, data=data):
            raise ValueError(funcs.errorMessage(
                'data shape {} is not compatible with required shape {}'.format(data.shape, shape),
                location=funcName))

    elif data is not None:
        shape = data.shape
        if not self.checkShapeLabelsCompatible(shape=shape, labels=labels):
            raise ValueError(funcs.errorMessage(
                'labels {} is not compatible with required shape {}'.format(labels, shape),
                location=funcName))
        if labels is None:
            labels = self.generateLabels(len(shape))

    else:
        raise ValueError(funcs.errorMessage(
            "Tensor() cannot accept parameters where legs, shape and data being None simultaneously.",
            location=funcName))

    data = self.generateData(shape=shape, data=data, isTensorLike=isTensorLike)

    if legs is None:
        legs = []
        for label, dim in zip(labels, list(shape)):
            legs.append(Leg(self, dim, label))
    else:
        for leg in legs:
            leg.tensor = self

    return legs, shape, labels, data
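
# Usage sketch of the deduction rules above, driven through the Tensor constructor,
# which forwards these arguments to deduction() (illustration only; import paths are assumptions).
import numpy as np
from CTL.tensor.tensor import Tensor  # assumed path
from CTL.tensor.leg import Leg        # assumed path

t1 = Tensor(shape=(2, 3))                              # labels auto-generated, data randomly generated
t2 = Tensor(labels=['x', 'y'], data=np.zeros((2, 3)))  # shape deduced from data
legA, legB = Leg(None, 2, 'a'), Leg(None, 3, 'b')
t3 = Tensor(legs=[legA, legB])                         # shape and labels deduced from the legs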
def SchimdtDecomposition(ta, tb, chi, squareRootSeparation=False, swapLabels=([], []), singularValueEps=1e-10):
    '''
    Schmidt decomposition between tensors ta and tb; returns (ta, s, tb).
    ta should be in canonical form, that is, a a^dagger = I.

    To do this: first contract ta and tb, while keeping track of the legs from a and the legs from b;
    then SVD over the matrix, take the required chi singular values,
    take the first chi singular vectors for a and b, and create a diagonal tensor for the singular values.

    If squareRootSeparation is True: divide s into two square-root diagonal tensors,
    contract one into ta and one into tb, and return (ta, None, tb).

    If swapLabels is not ([], []): swap the two sets of labels in the output,
    so that the locations of the two tensors on the MPS are swapped, e.g.

        t[i], t[i + 1] = SchimdtDecomposition(t[i], t[i + 1], chi = chi,
                                              squareRootSeparation = True, swapLabels = (['o'], ['o']))

    swaps the two tensors t[i] and t[i + 1], both of which have an "o" leg connected to the outside,
    while the other legs (e.g. internal legs in the MPS, usually 'l' and 'r') are not affected.
    '''
    funcName = 'CTL.examples.Schimdt.SchimdtDecomposition'
    sb = shareBonds(ta, tb)
    assert (len(sb) > 0), funcs.errorMessage(
        "Schimdt Decomposition cannot accept two tensors without common bonds, {} and {} gotten.".format(ta, tb),
        location=funcName)
    assert (ta.tensorLikeFlag == tb.tensorLikeFlag), funcs.errorMessage(
        "Schimdt Decomposition must have two objects being either Tensor or TensorLike simultaneously, but {} and {} obtained.".format(ta, tb),
        location=funcName)
    TLFlag = ta.tensorLikeFlag
    sbDim = funcs.tupleProduct(tuple([bond.legs[0].dim for bond in sb]))

    sharedLabelA = sb[0].sideLeg(ta).name
    sharedLabelB = sb[0].sideLeg(tb).name
    # if (sharedLabelA.startswith('a-')):
    #     raise ValueError(funcs.errorMessage(err = "shared label {} of tensor A starts with 'a-'.".format(sharedLabelA), location = funcName))
    # if (sharedLabelB.startswith('b-')):
    #     raise ValueError(funcs.errorMessage(err = "shared label {} of tensor B starts with 'b-'.".format(sharedLabelB), location = funcName))
    # assert (ta.xp == tb.xp), funcs.errorMessage("Schimdt Decomposition cannot accept two tensors with different xp: {} and {} gotten.".format(ta.xp, tb.xp), location = funcName)

    assert (len(swapLabels[0]) == len(swapLabels[1])), funcs.errorMessage(
        err="invalid swap labels {}.".format(swapLabels), location=funcName)
    assert ta.labelsInTensor(swapLabels[0]), funcs.errorMessage(
        err="{} not in tensor {}.".format(swapLabels[0], ta), location=funcName)
    assert tb.labelsInTensor(swapLabels[1]), funcs.errorMessage(
        err="{} not in tensor {}.".format(swapLabels[1], tb), location=funcName)

    ta.addTensorTag('a')
    tb.addTensorTag('b')
    for swapLabel in swapLabels[0]:
        ta.renameLabel('a-' + swapLabel, 'b-' + swapLabel)
    for swapLabel in swapLabels[1]:
        tb.renameLabel('b-' + swapLabel, 'a-' + swapLabel)

    tot = contractTwoTensors(ta, tb)

    legA = [leg for leg in tot.legs if leg.name.startswith('a-')]
    legB = [leg for leg in tot.legs if leg.name.startswith('b-')]
    labelA = [leg.name for leg in legA]
    labelB = [leg.name for leg in legB]
    # do not remove 'a-' and 'b-' here, since we need to add an internal leg,
    # and we need to distinguish it from the others

    shapeA = tuple([leg.dim for leg in legA])
    shapeB = tuple([leg.dim for leg in legB])
    totShapeA = funcs.tupleProduct(shapeA)
    totShapeB = funcs.tupleProduct(shapeB)

    if TLFlag:
        u = None
        vh = None
        s = None
        chi = min([chi, totShapeA, totShapeB, sbDim])
    else:
        mat = tot.toMatrix(rows=labelA, cols=labelB)
        # np = ta.xp # default numpy
        u, s, vh = xplib.xp.linalg.svd(mat)
        chi = min([chi, totShapeA, totShapeB, funcs.nonZeroElementN(s, singularValueEps)])
        u = u[:, :chi]
        s = s[:chi]
        vh = vh[:chi]

    if squareRootSeparation:
        if TLFlag:
            uS = None
            vS = None
        else:
            sqrtS = xplib.xp.sqrt(s)
            uS = funcs.rightDiagonalProduct(u, sqrtS)
            vS = funcs.leftDiagonalProduct(vh, sqrtS)

        outLegForU = Leg(None, chi, name=sharedLabelA)
        # inLegForU = Leg(None, chi, name = sharedLabelB)
        # internalLegForS1 = Leg(None, chi, name = 'o')
        # internalLegForS2 = Leg(None, chi, name = 'o')
        # inLegForV = Leg(None, chi, name = sharedLabelA)
        outLegForV = Leg(None, chi, name=sharedLabelB)

        uTensor = Tensor(data=uS, legs=legA + [outLegForU], shape=shapeA + (chi, ), tensorLikeFlag=TLFlag)
        # s1Tensor = DiagonalTensor(data = xplib.xp.sqrt(s), legs = [inLegForU, internalLegForS1], shape = (chi, chi))
        # s2Tensor = DiagonalTensor(data = xplib.xp.sqrt(s), legs = [internalLegForS2, inLegForV], shape = (chi, chi))
        vTensor = Tensor(data=vS, legs=[outLegForV] + legB, shape=(chi, ) + shapeB, tensorLikeFlag=TLFlag)

        # legs should be automatically set by Tensor / DiagonalTensor, so no need for setTensor
        # outLegForU.setTensor(uTensor)
        # outLegForV.setTensor(vTensor)
        # inLegForU.setTensor(sTensor)
        # inLegForV.setTensor(sTensor)

        # remove 'a-' and 'b-'
        for leg in legA:
            if (leg.name.startswith('a-')):
                leg.name = leg.name[2:]
        for leg in legB:
            if (leg.name.startswith('b-')):
                leg.name = leg.name[2:]

        makeLink(outLegForU, outLegForV)
        # makeLink(outLegForU, inLegForU)
        # makeLink(outLegForV, inLegForV)
        # makeLink(internalLegForS1, internalLegForS2)
        # uTensor = contractTwoTensors(uTensor, s1Tensor)
        # vTensor = contractTwoTensors(vTensor, s2Tensor)
        return uTensor, None, vTensor

    outLegForU = Leg(None, chi, name=sharedLabelA)
    inLegForU = Leg(None, chi, name=sharedLabelB)
    inLegForV = Leg(None, chi, name=sharedLabelA)
    outLegForV = Leg(None, chi, name=sharedLabelB)

    uTensor = Tensor(data=u, legs=legA + [outLegForU], shape=shapeA + (chi, ), tensorLikeFlag=TLFlag)
    sTensor = DiagonalTensor(data=s, legs=[inLegForU, inLegForV], shape=(chi, chi), tensorLikeFlag=TLFlag)
    vTensor = Tensor(data=vh, legs=[outLegForV] + legB, shape=(chi, ) + shapeB, tensorLikeFlag=TLFlag)

    # legs should be automatically set by Tensor / DiagonalTensor, so no need for setTensor
    # outLegForU.setTensor(uTensor)
    # outLegForV.setTensor(vTensor)
    # inLegForU.setTensor(sTensor)
    # inLegForV.setTensor(sTensor)

    # remove 'a-' and 'b-'
    for leg in legA:
        if (leg.name.startswith('a-')):
            leg.name = leg.name[2:]
    for leg in legB:
        if (leg.name.startswith('b-')):
            leg.name = leg.name[2:]

    makeLink(outLegForU, inLegForU)
    makeLink(outLegForV, inLegForV)

    return uTensor, sTensor, vTensor
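
# Usage sketch for SchimdtDecomposition (illustration only; import paths are assumptions).
import numpy as np
from CTL.tensor.tensor import Tensor            # assumed path
from CTL.tensor.contract.link import makeLink   # assumed path

ta = Tensor(data=np.random.rand(4, 3), labels=['o', 'internal'])
tb = Tensor(data=np.random.rand(3, 5), labels=['internal', 'o'])
makeLink(ta.getLeg('internal'), tb.getLeg('internal'))

u, s, v = SchimdtDecomposition(ta, tb, chi=2)
# u and v take the roles of ta and tb; s is a DiagonalTensor carrying at most chi singular values,
# linked between u and v through legs named after the original shared bond.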
def deduction(self, legs, data, labels, shape, isTensorLike = False):
    """
    For more information, check Tensor.deduction.
    """

    # in Tensor: the "shape" has the highest priority
    # so if the shape is given here, it should be taken
    # however, if the shape is given as an integer, then we need to deduce the dimension
    # if shape exists: deduce according to shape (but dim may still be deduced)
    # otherwise, if labels exist: dim from labels, and l from data
    # otherwise: both dim and l from data

    funcName = "DiagonalTensor.deduction"

    # first, consider the scalar case
    if (legs is None) and (labels is None) and (shape == () or ((data is not None) and (data.shape == ()))):
        if (data is None) and (not isTensorLike):
            data = xplib.xp.array(1.0)
        return [], data, [], () # scalar

    if (legs is not None):
        if (not self.checkLegsDiagonalCompatible(legs = legs)):
            raise ValueError(funcs.errorMessage('legs {} cannot be considered as legs for diagonal tensor.'.format(legs), location = funcName))

        if (not self.checkLegsLabelsCompatible(legs = legs, labels = labels)):
            raise ValueError(funcs.errorMessage('labels {} is not compatible with legs {}'.format(labels, legs), location = funcName))
        if (labels is None):
            labels = [leg.name for leg in legs]

        if (not self.checkLegsShapeCompatible(legs = legs, shape = shape)):
            raise ValueError(funcs.errorMessage('shape {} is not compatible with legs {}'.format(shape, legs), location = funcName))
        if (shape is None) or (isinstance(shape, int)):
            shape = tuple([leg.dim for leg in legs])

        if (not self.checkShapeDataCompatible(shape = shape, data = data)):
            raise ValueError(funcs.errorMessage('data shape {} is not compatible with required shape {}'.format(data.shape, shape), location = funcName))

    elif (shape is not None):
        if (isinstance(shape, int)):
            dim = self.deduceDimension(data = data, labels = labels)
            shape = tuple([shape] * dim)
        if (not self.checkShapeDiagonalCompatible(shape = shape)):
            raise ValueError(funcs.errorMessage('shape {} cannot be considered as shape for diagonal tensor.'.format(shape), location = funcName))

        if (not self.checkShapeLabelsCompatible(shape = shape, labels = labels)):
            raise ValueError(funcs.errorMessage('labels {} is not compatible with required shape {}'.format(labels, shape), location = funcName))
        if (labels is None):
            labels = self.generateLabels(len(shape))

        if (not self.checkShapeDataCompatible(shape = shape, data = data)):
            raise ValueError(funcs.errorMessage('data shape {} is not compatible with required shape {}'.format(data.shape, shape), location = funcName))

    elif (data is not None):
        # legs and shape are both None
        shape = data.shape
        if (not self.checkShapeDiagonalCompatible(shape = shape)):
            raise ValueError(funcs.errorMessage('data shape {} cannot be considered as shape for diagonal tensor.'.format(shape), location = funcName))

        dim = self.deduceDimension(data = data, labels = labels)
        if (len(shape) == 1) and (dim > 1):
            shape = tuple([shape[0]] * dim)

        if (not self.checkShapeLabelsCompatible(shape = shape, labels = labels)):
            raise ValueError(funcs.errorMessage('labels {} is not compatible with required shape {}'.format(labels, shape), location = funcName))
        if (labels is None):
            labels = self.generateLabels(len(shape))

    else:
        raise ValueError(funcs.errorMessage("Tensor() cannot accept parameters where legs, shape and data being None simultaneously.", location = funcName))

    # legacy deduction code, kept for reference:
    # elif (shape is not None):
    #     if (isinstance(shape, int)):
    #         dim = self.deduceDimension(data, labels)
    #         l = shape
    #     else:
    #         dim = len(shape)
    #         if (dim == 0) or (not funcs.checkAllEqual(shape)):
    #             raise ValueError(funcs.errorMessage(location = funcName, err = "shape {} is not valid.".format(shape)))
    #         l = shape[0]
    #     # then we need to deduce dimension
    #     if (labels is not None) and (len(labels) != dim):
    #         raise ValueError(funcs.errorMessage(location = funcName, err = "number of labels is not the same as dim: {} expected but {} obtained.".format(dim, len(labels))))
    # elif (data is not None):
    #     # data can be either shape, or an array of l
    #     if (len(data.shape) == 1):
    #         if (data.shape[0] != l):
    #             raise ValueError(funcs.errorMessage(location = funcName, err = "data length is not the same as length deduced from shape: {} expected but {} obtained.".format(l, data.shape[0])))
    #     elif (len(data.shape) != dim) or (data.shape != tuple([l] * dim)):
    #         raise ValueError(funcs.errorMessage(location = funcName, err = "data shape is not correct: {} expected but {} obtained.".format(tuple([l] * dim), data.shape)))
    # # shape is None, how to deduce shape?
    # elif (labels is not None):
    #     dim = len(labels)
    #     if (data is None):
    #         raise ValueError(funcs.errorMessage(location = funcName, err = "cannot deduce data shape since data and shape are both None."))
    #     elif (len(data.shape) == 1):
    #         l = len(data)
    #     elif not funcs.checkAllEqual(data.shape):
    #         raise ValueError(funcs.errorMessage(location = funcName, err = "data.shape {} is not valid.".format(data.shape)))
    #     else:
    #         if (len(data.shape) != dim):
    #             raise ValueError(funcs.errorMessage(location = funcName, err = "dimension of data is not compatible with dimension deduced from labels: expect {} but {} is given.".format(dim, len(data.shape))))
    #         l = data.shape[0]
    # else:
    #     # deduce from data.shape
    #     if (data is None):
    #         raise ValueError(funcs.errorMessage(location = funcName, err = "data, labels and shape are all None."))
    #     elif not funcs.checkAllEqual(data.shape):
    #         raise ValueError(funcs.errorMessage(location = funcName, err = "data.shape {} is not valid.".format(data.shape)))
    #     else:
    #         dim = len(data.shape)
    #         l = data.shape[0]
    # print('l = {}, dim = {}'.format(l, dim))
    # shape = tuple([l] * dim)

    data = self.generateData(shape = shape, data = data, isTensorLike = isTensorLike)

    # if (tensorLikeFlag):
    #     data = None
    # elif (data is None):
    #     # default is identity
    #     data = xplib.xp.ones(l)
    # elif (len(data.shape) == 1):
    #     data = xplib.xp.copy(data)
    # else:
    #     data = xplib.xp.array([data[tuple([x] * dim)] for x in range(l)]) # must be a copy of original "data" if exist

    # if (labels is None):
    #     labels = self.generateLabels(dim)

    if (legs is None):
        legs = []
        for label, dim in zip(labels, list(shape)):
            legs.append(Leg(self, dim, label))
    else:
        for leg in legs:
            leg.tensor = self

    return legs, data, labels, shape
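
# Usage sketch of the DiagonalTensor deduction rules (illustration only;
# the import path below is an assumption; the call patterns mirror the unit tests).
import numpy as np
from CTL.tensor.diagonalTensor import DiagonalTensor  # assumed path

d1 = DiagonalTensor(shape = (3, 3))                             # diagonal defaults to ones (identity-like)
d2 = DiagonalTensor(labels = ['a', 'b'], data = np.zeros(4))    # 1D data of length 4 -> shape (4, 4)
d3 = DiagonalTensor(data = np.array([[1.0, 2.0], [3.0, 4.0]]))  # only the diagonal [1.0, 4.0] is stored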
def test_TensorDeduction(self):
    # test the deduction of Tensor
    # deduce strategy:
    # we want shape and labels
    # we have legs, shape, labels, data
    # priority for shape: legs > shape > data
    # priority for labels: legs > labels

    # 1. legs exist:
    #    if labels exist: check the length and content of labels against legs
    #    if shape exists: check whether shape == tuple([leg.dim for leg in legs])
    #    if data exists: check whether data.shape == tuple([leg.dim for leg in legs])
    #        (if not, but the total size is equal, we reshape data to the given shape)
    # 3. legs not exist, shape not exist
    #    if data exists: generate shape according to data, and auto-generate legs

    legA = Leg(tensor = None, dim = 5, name = 'a')
    legB = Leg(tensor = None, dim = 6, name = 'b')

    a = Tensor(legs = [legA, legB])
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))

    a = Tensor(legs = [legA, legB], labels = ['a', 'b'])
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))

    def labelsSizeNotEqualFunc():
        _ = Tensor(legs = [legA, legB], labels = ['a'])
    def labelsOrderNotEqualFunc():
        _ = Tensor(legs = [legA, legB], labels = ['b', 'a'])
    def shapeSizeNotEqualFunc():
        _ = Tensor(legs = [legA, legB], shape = (5, 6, 7))
    def shapeOrderNotEqualFunc():
        _ = Tensor(legs = [legA, legB], shape = (6, 5))
    def dataDimNotEqualFunc():
        _ = Tensor(legs = [legA, legB], data = np.zeros((5, 6, 7)))
    def dataShapeNotEqualFunc():
        _ = Tensor(legs = [legA, legB], data = np.zeros((5, 5)))

    self.assertRaises(ValueError, labelsSizeNotEqualFunc)
    self.assertRaises(ValueError, labelsOrderNotEqualFunc)
    self.assertRaises(ValueError, shapeSizeNotEqualFunc)
    self.assertRaises(ValueError, shapeOrderNotEqualFunc)
    self.assertRaises(ValueError, dataDimNotEqualFunc)
    self.assertRaises(ValueError, dataShapeNotEqualFunc)

    # for data, works if (data size) == (dim product), no matter what the shape of data is
    a = Tensor(legs = [legA, legB], data = np.zeros((6, 5)))
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))

    a = Tensor(legs = [legA, legB], data = np.zeros((3, 2, 5)))
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))

    a = Tensor(legs = [legA, legB], data = np.zeros(30))
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))

    # 2. legs not exist, shape exist
    #    if data exists: check that the total number of components of data equals the shape; otherwise random
    #    if labels exist: check that the number of labels equals the dimension; otherwise auto-generate
    a = Tensor(shape = (5, 3, 4))
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b', 'c']))
    self.assertEqual(a.dim, 3)

    a = Tensor(shape = (5, 3, 4), data = np.zeros(60))
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b', 'c']))
    self.assertEqual(a.dim, 3)
    self.assertEqual(a.a[(0, 0, 0)], 0)

    a = Tensor(shape = (5, 3, 4), labels = ['c', 'b', 'a'])
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['c', 'b', 'a']))
    self.assertEqual(a.dim, 3)

    a = Tensor(shape = (5, 3, 4), labels = ('c', 'b', 'a')) # a tuple is also acceptable
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['c', 'b', 'a']))
    self.assertEqual(a.dim, 3)

    a = Tensor(shape = (5, 3, 4), labels = ['c', 'b', 'a'], data = np.zeros(60))
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['c', 'b', 'a']))
    self.assertEqual(a.dim, 3)

    def dataTotalSizeNotEqualErrorFunc():
        _ = Tensor(shape = (5, 3, 4), data = np.zeros((5, 3, 3)))
    def labelsSizeNotEqualFunc2():
        _ = Tensor(shape = (5, 3, 4), labels = ['a', 'b'])

    self.assertRaises(ValueError, dataTotalSizeNotEqualErrorFunc)
    self.assertRaises(ValueError, labelsSizeNotEqualFunc2)

    # 3. legs not exist, shape not exist
    #    if data exists: generate shape according to data, and auto-generate legs
    a = Tensor(data = np.zeros((3, 4, 5)))
    self.assertTupleEqual(a.shape, (3, 4, 5))
    self.assertEqual(a.dim, 3)
    self.assertEqual(a.legs[0].dim, 3)
    self.assertEqual(a.legs[0].name, 'a')

    def nothingErrorFunc():
        _ = Tensor()
    self.assertRaises(ValueError, nothingErrorFunc)
def test_TensorLike(self):
    # the deduction rules are just the same as Tensor
    # the only difference is that data is None, and tensorLikeFlag is True

    legA = Leg(tensor = None, dim = 5, name = 'a')
    legB = Leg(tensor = None, dim = 6, name = 'b')

    a = Tensor(legs = [legA, legB], tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(legs = [legA, legB], labels = ['a', 'b'], tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    def labelsSizeNotEqualFunc():
        _ = Tensor(legs = [legA, legB], labels = ['a'], tensorLikeFlag = True)
    def labelsOrderNotEqualFunc():
        _ = Tensor(legs = [legA, legB], labels = ['b', 'a'], tensorLikeFlag = True)
    def shapeSizeNotEqualFunc():
        _ = Tensor(legs = [legA, legB], shape = (5, 6, 7), tensorLikeFlag = True)
    def shapeOrderNotEqualFunc():
        _ = Tensor(legs = [legA, legB], shape = (6, 5), tensorLikeFlag = True)
    def dataDimNotEqualFunc():
        _ = Tensor(legs = [legA, legB], data = np.zeros((5, 6, 7)), tensorLikeFlag = True)
    def dataShapeNotEqualFunc():
        _ = Tensor(legs = [legA, legB], data = np.zeros((5, 5)), tensorLikeFlag = True)

    self.assertRaises(ValueError, labelsSizeNotEqualFunc)
    self.assertRaises(ValueError, labelsOrderNotEqualFunc)
    self.assertRaises(ValueError, shapeSizeNotEqualFunc)
    self.assertRaises(ValueError, shapeOrderNotEqualFunc)
    self.assertRaises(ValueError, dataDimNotEqualFunc)
    self.assertRaises(ValueError, dataShapeNotEqualFunc)

    # for data, works if (data size) = (dim product), no matter what the shape of data
    a = Tensor(legs = [legA, legB], data = np.zeros((6, 5)), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(legs = [legA, legB], data = np.zeros((3, 2, 5)), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(legs = [legA, legB], data = np.zeros(30), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 6))
    self.assertEqual(a.dim, 2)
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b']))
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    # 2. legs not exist, shape exist
    #    if data exist, check the total number of components of data equal to shape, otherwise random
    #    if labels exist: check the number of labels equal to dimension, otherwise auto-generate
    a = Tensor(shape = (5, 3, 4), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b', 'c']))
    self.assertEqual(a.dim, 3)
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(shape = (5, 3, 4), data = np.zeros(60), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['a', 'b', 'c']))
    self.assertEqual(a.dim, 3)
    # self.assertEqual(a.a[(0, 0, 0)], 0)
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(shape = (5, 3, 4), labels = ['c', 'b', 'a'], tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['c', 'b', 'a']))
    self.assertEqual(a.dim, 3)
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(shape = (5, 3, 4), labels = ('c', 'b', 'a'), tensorLikeFlag = True) # tuple also is acceptable
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['c', 'b', 'a']))
    self.assertEqual(a.dim, 3)
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    a = Tensor(shape = (5, 3, 4), labels = ['c', 'b', 'a'], data = np.zeros(60), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (5, 3, 4))
    self.assertTrue(funcs.compareLists(a.labels, ['c', 'b', 'a']))
    self.assertEqual(a.dim, 3)
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)

    def dataTotalSizeNotEqualErrorFunc():
        _ = Tensor(shape = (5, 3, 4), data = np.zeros((5, 3, 3)), tensorLikeFlag = True)
    def labelsSizeNotEqualFunc2():
        _ = Tensor(shape = (5, 3, 4), labels = ['a', 'b'])

    self.assertRaises(ValueError, dataTotalSizeNotEqualErrorFunc)
    self.assertRaises(ValueError, labelsSizeNotEqualFunc2)

    # 3. legs not exist, shape not exist
    #    if data exist: generate shape according to data, and auto-generate legs
    a = Tensor(data = np.zeros((3, 4, 5)), tensorLikeFlag = True)
    self.assertTupleEqual(a.shape, (3, 4, 5))
    self.assertEqual(a.dim, 3)
    self.assertEqual(a.legs[0].dim, 3)
    self.assertEqual(a.legs[0].name, 'a')
    self.assertTrue(a.tensorLikeFlag)
    self.assertIsNone(a.a)
    self.assertTrue(a.__str__().find('TensorLike') != -1)
    self.assertTrue(a.__repr__().find('TensorLike') != -1)

    def nothingErrorFunc():
        _ = Tensor(tensorLikeFlag = True)
    self.assertRaises(ValueError, nothingErrorFunc)