def _partial_product(i, a, b):
    r"""Contract two tensors over every axis except `i`.

    Parameters
    ----------
    i : {int, None}
    a : (..., n, ...) ndarray
    b : (..., m, ...) ndarray

    Returns
    -------
    mat : (n, m) ndarray
        Or return a float if `i` is None.
    """
    if i is None:
        a = np.reshape(a, -1)
        b = np.reshape(b, -1)
    else:
        n = a.shape[i]
        m = b.shape[i]
        a = np.moveaxis(a, i, 0)
        a = np.reshape(a, (n, -1))
        b = np.moveaxis(b, i, -1)
        b = np.reshape(b, (-1, m))
    mat = np.dot(a, b)
    return mat
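# A minimal usage sketch (not part of the original module): it checks that this
# `_partial_product` (the `(i, a, b)` signature) contracts every axis except `i`,
# matching an explicit einsum.  The `_demo_*` name is illustrative only, and
# numpy is re-imported here just so the sketch is self-contained.
import numpy as np

def _demo_partial_product_ab():
    rng = np.random.default_rng(0)
    a = rng.standard_normal((3, 4, 5))   # axis 2 has size n = 5
    b = rng.standard_normal((3, 4, 6))   # axis 2 has size m = 6
    mat = _partial_product(2, a, b)      # contract the (3, 4) axes -> (5, 6)
    assert np.allclose(mat, np.einsum('abi,abj->ij', a, b))
    # With i=None both tensors are flattened and a plain inner product is taken.
    assert np.isclose(_partial_product(None, a, a), np.sum(a * a))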
def partial_trace(array1, i, array2, j):
    r"""Partial trace of 2 tensors, return a matrix.

          +----+
          |    |
        -i- 1 -- 2 -j-

    Parameters
    ----------
    array1 : ndarray
    i : {int, None}
        If `i` is None, then `j` must be None.
    array2 : ndarray
    j : {int, None}
        If `j` is None, then `i` must be None.

    Returns
    -------
    ans : ndarray
        Of shape `(n, m)`.
    """
    if i is not None and j is not None:
        shape_1, shape_2 = map(list, (array1.shape, array2.shape))
        n, m = shape_1[i], shape_2[j]
        array1 = np.moveaxis(array1, i, 0)
        array1 = np.reshape(array1, (n, -1))
        array2 = np.moveaxis(array2, j, -1)
        array2 = np.reshape(array2, (-1, m))
    elif i is None and j is None:
        array1 = np.reshape(array1, -1)
        array2 = np.reshape(array2, -1)
    else:
        raise TypeError('Invalid parameters i={} and j={}!'.format(i, j))
    ans = np.dot(array1, array2)
    return ans
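# A usage sketch (illustrative addition): `partial_trace` keeps leg `i` of
# `array1` and leg `j` of `array2` open and sums over everything else, as the
# diagram in the docstring suggests.
import numpy as np

def _demo_partial_trace():
    rng = np.random.default_rng(1)
    t1 = rng.standard_normal((5, 3, 4))   # leg 0 (size 5) stays open
    t2 = rng.standard_normal((3, 4, 6))   # leg 2 (size 6) stays open
    mat = partial_trace(t1, 0, t2, 2)     # shape (5, 6)
    assert np.allclose(mat, np.einsum('iab,abj->ij', t1, t2))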
def __partial_product(array1, i, array2, j):
    r"""Multiply a tensor by a matrix along a given axis.

           |
        -- 1 -i--j- 2 --
           |

    Parameters
    ----------
    array1 : ndarray
    i : int
    array2 : 2-d ndarray
    j : int

    Returns
    -------
    ans : ndarray
    """
    shape_1, shape_2 = map(list, (array1.shape, array2.shape))
    n, m = shape_1.pop(i), shape_2.pop(j)
    new_shape = shape_1[:i] + shape_2 + shape_1[i:]
    array1 = np.moveaxis(array1, i, -1)
    array1 = np.reshape(array1, (-1, n))
    array2 = np.moveaxis(array2, j, 0)
    array2 = np.reshape(array2, (m, -1))
    ans = np.dot(array1, array2)
    ans = np.reshape(ans, shape_1 + [-1])
    ans = np.moveaxis(ans, -1, i)
    ans = np.reshape(ans, new_shape)
    return ans
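# A usage sketch (illustrative addition), assuming `__partial_product` is a
# module-level helper (so the double underscore is not name-mangled): the
# matrix is contracted into axis `i`, and its free leg takes that position.
import numpy as np

def _demo_matrix_into_tensor():
    rng = np.random.default_rng(2)
    tensor = rng.standard_normal((2, 3, 4))
    matrix = rng.standard_normal((3, 5))
    out = __partial_product(tensor, 1, matrix, 0)   # contract axis 1 with axis 0
    assert out.shape == (2, 5, 4)
    assert np.allclose(out, np.einsum('anb,nk->akb', tensor, matrix))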
def normalize(self, forced=False):
    """Normalize the array of self. Only works when `self.normalized`;
    set `forced` to `True` to normalize anyway.
    """
    array = self.array
    if array is None or (not self.normalized and not forced):
        return
    axis = self.axis
    if axis is None:
        norm = np.array(self.local_norm())
        self.set_array(array / norm)
        ans = norm
    else:
        norm = linalg.norm
        shape = self.shape
        dim = shape.pop(axis)
        array = np.reshape(np.moveaxis(array, axis, 0), (dim, -1))
        # Gram-Schmidt: orthonormalize the slices along `axis`.
        vecs = []
        norm_list = []
        for vec_i in array:
            for vec_j in vecs:
                vec_i -= vec_j * np.dot(np.conj(vec_j), vec_i)
            norm_ = norm(vec_i)
            vecs.append(vec_i / norm_)
            norm_list.append(norm_)
        array = np.array(vecs)
        array = np.moveaxis(np.reshape(array, [-1] + shape), 0, axis)
        self.set_array(array)
        ans = norm_list
    return ans
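# A standalone sketch (not tied to the Tensor class) of the Gram-Schmidt step
# used in `normalize` above: the slices along `axis` are orthonormalized
# against each other and their norms are collected.  Names here are
# illustrative only.
import numpy as np

def _gram_schmidt_rows(mat):
    """Orthonormalize the rows of `mat`; return (vecs, norms)."""
    vecs, norms = [], []
    for vec in np.array(mat, dtype=complex, copy=True):
        for prev in vecs:
            vec = vec - prev * np.dot(np.conj(prev), vec)
        norm_ = np.linalg.norm(vec)
        vecs.append(vec / norm_)
        norms.append(norm_)
    return np.array(vecs), norms

def _demo_gram_schmidt():
    vecs, _ = _gram_schmidt_rows(np.random.default_rng(3).standard_normal((3, 8)))
    assert np.allclose(vecs @ vecs.conj().T, np.eye(3))   # rows are orthonormal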
def autocomplete(root, n_bond_dict):
    """Autocomplete the tensors linked to `root` with suitable initial
    values.

    Parameters
    ----------
    root : Tensor
    n_bond_dict : {Leaf: int}
        A dictionary to specify the dimensions of each primary basis.
    """
    for t in root.visitor(leaf=False):
        if t.array is None:
            axis = t.axis
            n_children = []
            for i, child, j in t.children():
                n_children.append(n_bond_dict[(t, i, child, j)])
            if axis is not None:
                p, p_i = t[axis]
                n_parent = n_bond_dict[(p, p_i, t, axis)]
                shape = [n_parent] + n_children
            else:
                n_parent = 1
                shape = n_children
            array = np.zeros((n_parent, np.prod(n_children)))
            for n, v_i in zip(triangular(n_children), array):
                v_i[n] = 1.
            array = np.reshape(array, shape)
            if axis is not None:
                array = np.moveaxis(array, 0, axis)
            t.set_array(array)
            t.normalize(forced=True)
            assert (
                t.axis is None or
                np.linalg.matrix_rank(t.local_norm()) == t.shape[t.axis]
            )
    if __debug__:
        for t in root.visitor():
            t.check_completness(strict=True)
    return
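# A sketch of the one-hot initialization above (illustrative addition): one
# basis row per parent-bond index, reshaped so that the parent bond lands on
# `axis`.  `range(n_parent)` stands in for `triangular(n_children)`, whose
# exact index ordering is assumed rather than reproduced here.
import numpy as np

def _one_hot_init(n_parent, n_children, axis):
    array = np.zeros((n_parent, int(np.prod(n_children))))
    for n, row in zip(range(n_parent), array):   # stand-in for triangular(...)
        row[n] = 1.
    array = np.reshape(array, [n_parent] + list(n_children))
    return np.moveaxis(array, 0, axis)

# e.g. parent bond 2 on axis 1 with child bonds (3, 4):
# _one_hot_init(2, [3, 4], 1).shape == (3, 2, 4)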
def projector(self, comp=False):
    """[Deprecated] Return the projector corresponding to self.

    Parameters
    ----------
    comp : bool
        If `True`, return the complementary projector `1 - P` instead.

    Returns
    -------
    ans : ndarray
    """
    axis = self.axis
    if axis is not None:
        array = self.array
        shape = self.shape
        dim = shape.pop(self.axis)
        comp_dim = np.prod(shape)
        array = np.moveaxis(array, axis, -1)
        array = np.reshape(array, (-1, dim))
        array_h = np.conj(np.transpose(array))
        ans = np.dot(array, array_h)
        if comp:
            identity = np.identity(comp_dim)
            ans = identity - ans
        ans = np.reshape(ans, shape * 2)
        return ans
    else:
        raise RuntimeError('Need to specify the normalization axis!')
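# A standalone check (not tied to the Tensor class) of the projector algebra
# used above: with orthonormal columns in A, P = A A^H is idempotent and
# 1 - P projects onto the orthogonal complement.
import numpy as np

def _demo_projector():
    rng = np.random.default_rng(4)
    a = np.linalg.qr(rng.standard_normal((6, 2)))[0]   # 6x2, orthonormal columns
    p = a @ a.conj().T
    assert np.allclose(p @ p, p)                       # idempotent
    assert np.allclose((np.identity(6) - p) @ p, 0.0)  # complement annihilates P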
def autocomplete(self, n_bond_dict, max_entangled=False):
    """Autocomplete the tensors linked to `self.root` with suitable initial
    values.

    Parameters
    ----------
    n_bond_dict : {Leaf: int}
        A dictionary to specify the dimensions of each primary basis.
    max_entangled : bool
        Whether to use the maximally entangled state as the initial value
        (for finite temperature and imaginary-time propagation).
        Default is `False`.
    """
    for t in self.root.visitor(leaf=False):
        if t.array is None:
            axis = t.axis
            if max_entangled and not any(t.children(leaf=False)):
                if len(list(t.children(leaf=True))) != 2 or axis is None:
                    raise RuntimeError('Not a correct tensor graph for FT.')
                for i, leaf, j in t.children():
                    if not leaf.name.endswith("'"):
                        n_leaf = n_bond_dict[(t, i, leaf, j)]
                        break
                p, p_i = t[axis]
                n_parent = n_bond_dict[(p, p_i, t, axis)]
                # Maximally entangled state: vectorization of I / sqrt(n).
                vec_i = np.diag(np.ones((n_leaf,)) / np.sqrt(n_leaf))
                vec_i = np.reshape(vec_i, -1)
                init_vecs = [vec_i]
                da = DavidsonAlgorithm(self._local_matvec(leaf),
                                       init_vecs=init_vecs,
                                       n_vals=n_parent)
                array = da.kernel(search_mode=True)
                if len(array) >= n_parent:
                    array = array[:n_parent]
                else:
                    # Pad with unit vectors if Davidson returns too few.
                    for j in range(n_parent - len(array)):
                        v = np.zeros((n_leaf**2,))
                        v[j] = 1.0
                        array.append(v)
                assert len(array) == n_parent
                assert np.allclose(array[0], vec_i)
                array = np.reshape(array, (n_parent, n_leaf, n_leaf))
            else:
                n_children = []
                for i, child, j in t.children():
                    n_children.append(n_bond_dict[(t, i, child, j)])
                if axis is not None:
                    p, p_i = t[axis]
                    n_parent = n_bond_dict[(p, p_i, t, axis)]
                    shape = [n_parent] + n_children
                else:
                    n_parent = 1
                    shape = n_children
                array = np.zeros((n_parent, np.prod(n_children)))
                for n, v_i in zip(self.triangular(n_children), array):
                    v_i[n] = 1.
                array = np.reshape(array, shape)
                if axis is not None:
                    array = np.moveaxis(array, 0, axis)
            t.set_array(array)
            t.normalize(forced=True)
            assert (t.axis is None or
                    np.linalg.matrix_rank(t.local_norm()) == t.shape[t.axis])
    if __debug__:
        for t in self.root.visitor():
            t.check_completness(strict=True)
    return
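# A small sketch (illustrative addition) of the maximally entangled initial
# vector built in the finite-temperature branch above: the flattened matrix
# I / sqrt(n) is a unit vector whose reduced density matrix on either leaf is
# the identity divided by n.
import numpy as np

def _demo_max_entangled(n_leaf=4):
    vec = np.reshape(np.diag(np.ones(n_leaf) / np.sqrt(n_leaf)), -1)
    assert np.isclose(np.linalg.norm(vec), 1.0)
    coeff = np.reshape(vec, (n_leaf, n_leaf))          # coefficient matrix
    assert np.allclose(coeff @ coeff.conj().T, np.identity(n_leaf) / n_leaf)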
def _partial_product(array1, i, array2, j):
    """Contract axis `i` of `array1` with axis `j` of `array2`; the remaining
    axes of `array2` take the place of axis `i` in the result.
    """
    l1, l2 = array1.ndim, array2.ndim
    ans = np.tensordot(array1, array2, axes=([i], [j]))
    ans = np.moveaxis(ans, list(range(l1 - 1, l1 + l2 - 2)),
                      list(range(i, i + l2 - 1)))
    return ans
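# A usage sketch (illustrative addition) for this tensordot-based
# `_partial_product` (the `(array1, i, array2, j)` signature): axis `i` of the
# first tensor is contracted with axis `j` of the second, and the remaining
# legs of the second tensor take the place of axis `i`.
import numpy as np

def _demo_partial_product_tensordot():
    rng = np.random.default_rng(5)
    t1 = rng.standard_normal((2, 3, 4))
    t2 = rng.standard_normal((5, 3, 6))
    out = _partial_product(t1, 1, t2, 1)
    assert out.shape == (2, 5, 6, 4)
    assert np.allclose(out, np.einsum('anb,knm->akmb', t1, t2))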
def split(self, axis, indice=None, root=None, child=None, rank=None,
          err=None, normalized=False):
    """Split the root Tensor along a certain axis (or certain axes).

    Parameters
    ----------
    axis : {int, [int]}
    rank : int
        Max rank in SVD.
    err : float
        Max error in SVD.
    indice : (int, int)
        Linkage between root and child.
    root : Tensor
        Tensor to be the new root node. `None` to create a new Tensor.
    child : Tensor
        Tensor to be the new child node. `None` to create a new Tensor.

    Returns
    -------
    root : Tensor
        New root node in the same environment as self.
    child : Tensor
        New child node in the same environment as self.

    Notes
    -----
    When splitting a Tensor, this method should make `root.unite(i)`
    (i.e. uniting with child) an inverse operation in terms of the
    tensor network.
    """
    if self.axis is not None:
        raise RuntimeError('Can only split the root Tensor!')
    try:
        axes1 = list(sorted(axis))
    except TypeError:
        axes1 = [axis]
        default_indice = (0, axis)
    else:
        default_indice = (0, 0)
    axes2 = [i for i in range(self.order) if i not in axes1]
    index1, index2 = indice if indice is not None else default_indice

    # Save all data needed from `self`.
    a = self.array
    children = list(self.children(axis=None))
    name = self.name
    shape = self.shape
    shape1, shape2 = [shape[i] for i in axes1], [shape[i] for i in axes2]
    # Name settings, only for clarity.
    if '+' in name:
        name1, name2 = name.split('+')
    else:
        name1, name2 = name + '\'', name

    # Calculate arrays for the new tensors.
    for n, i in enumerate(axes1):
        a = np.moveaxis(a, i, n)
    a = np.reshape(a, (np.prod([1] + shape1), np.prod([1] + shape2)))
    u, s, vh = compressed_svd(a, rank=rank, err=err)
    root_array = np.reshape(np.dot(u, s), shape1 + [-1])
    root_array = np.moveaxis(root_array, -1, index1)
    child_array = np.reshape(vh, [-1] + shape2)
    child_array = np.moveaxis(child_array, 0, index2)

    # Create/write the new tensors.
    cls = type(self)
    if root is None:
        root = cls(name=name1, array=root_array, axis=None,
                   normalized=normalized)
    else:
        root.axis = None
        root.set_array(root_array)
    if child is None:
        child = cls(name=name2, array=child_array, axis=index2,
                    normalized=normalized)
    else:
        child.axis = index2
        child.set_array(child_array)

    # Fix linkage info.
    axes1.insert(index1, None)
    axes2.insert(index2, None)
    unlink = self.unlink
    link = self.link
    link_info = [(root, index1, child, index2)]
    for i, t, j in children:
        is_1 = i in axes1
        axes = axes1 if is_1 else axes2
        tensor = root if is_1 else child
        unlink(self, i, t, j)
        link_info.append((tensor, axes.index(i), t, j))
    for linkage in link_info:
        link(*linkage)
    return root, child
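# A standalone sketch of the SVD step that `split` relies on, with
# `np.linalg.svd` standing in for `compressed_svd` (so no rank/err truncation):
# contracting the two pieces over the new bond recovers the matricized tensor.
# All names below are illustrative only.
import numpy as np

def _demo_split_svd(shape=(2, 3, 4), axes1=(0,)):
    rng = np.random.default_rng(6)
    t = rng.standard_normal(shape)
    axes2 = [i for i in range(t.ndim) if i not in axes1]
    shape1 = [shape[i] for i in axes1]
    shape2 = [shape[i] for i in axes2]
    a = np.reshape(np.moveaxis(t, list(axes1), list(range(len(axes1)))),
                   (int(np.prod(shape1)), int(np.prod(shape2))))
    u, s, vh = np.linalg.svd(a, full_matrices=False)
    root_array = np.reshape(u * s, shape1 + [-1])    # new bond on the last axis
    child_array = np.reshape(vh, [-1] + shape2)      # new bond on the first axis
    recon = np.tensordot(root_array, child_array, axes=1)
    assert np.allclose(recon, np.reshape(a, shape1 + shape2))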