def parameter_gradient_eval_dmrg(self, alpha, x=None, dmrg_type='dmrg',
                                 *args):
    '''
    Compute the gradient of the function with respect to its alpha-th
    parameter, evaluated at some points, using a DMRG approach: the
    gradient is computed with respect to the parameters associated with
    the node alpha and its parent.

    Parameters
    ----------
    alpha : int
        The number of the parameter with respect to which the gradient
        of self is computed.
    x : list or numpy.ndarray, optional
        The points at which the gradient is to be evaluated. The default
        is None, indicating to use self.bases if self.evaluated_bases is
        True.
    dmrg_type : str, optional
        The type of DMRG: 'dmrg' to return a single gradient associated
        with the pair (alpha, parent of alpha), or 'dmrg_low_rank' to
        return the two gradients separately. The default is 'dmrg'.

    Raises
    ------
    ValueError
        If no input points are provided, or if dmrg_type is unknown.

    Returns
    -------
    grad : Tensor or list of Tensor
        The gradient of the function, evaluated at some points; a list
        of two tensors if dmrg_type is 'dmrg_low_rank'.

    '''
    if self.evaluated_bases:
        bases_eval = self.bases
    elif x is not None:
        bases_eval = self.bases.eval(x)
    else:
        raise ValueError('Must provide the evaluation points or the ' +
                         'bases evaluations.')

    dims = np.arange(self.tensor.order)
    if isinstance(self.tensor, tensap.TreeBasedTensor):
        # Compute fH, the TimesMatrixEvalDiag of f with bases_eval in all
        # the dimensions except the ones associated with alpha (if alpha
        # is a leaf node) or with the inactive children of alpha (if
        # alpha is an internal node). The tensor fH is used to compute
        # the gradient of f with respect to f.tensor.tensors[alpha-1].
        tree = self.tensor.tree
        if tree.is_leaf[alpha - 1]:
            dims = dims[tree.dim2ind != alpha]
        else:
            children = tree.children(alpha)
            ind = tensap.fast_intersect(
                tree.dim2ind,
                children[np.logical_not(
                    self.tensor.is_active_node[children - 1])])
            dims = dims[np.logical_not(np.isin(tree.dim2ind, ind))]
        fH = self.tensor.tensor_matrix_product(
            [bases_eval[x] for x in dims], dims)
    else:
        if alpha <= self.tensor.order:
            dims = np.delete(dims, alpha - 1)
        fH = self.tensor.tensor_matrix_product(
            [bases_eval[x] for x in dims], dims)

    grad, g_alpha, g_gamma = \
        fH.parameter_gradient_eval_diag_dmrg(alpha, bases_eval)

    if isinstance(self.tensor, tensap.TreeBasedTensor):
        # If the order of the children has been modified in grad, compute
        # the inverse permutation.
        ch = tree.children(alpha)
        if ch.size == 0:
            perm_1 = np.array([0])
        else:
            perm_1 = np.argsort(
                np.concatenate(
                    (np.atleast_1d(ch[fH.is_active_node[ch - 1]]),
                     np.atleast_1d(ch[np.logical_not(
                         fH.is_active_node[ch - 1])]))))

        gamma = tree.parent(alpha)
        ch = tensap.fast_setdiff(tree.children(gamma), alpha)
        perm_1b = np.argsort(
            np.concatenate((np.atleast_1d(ch[fH.is_active_node[ch - 1]]),
                            np.atleast_1d(ch[np.logical_not(
                                fH.is_active_node[ch - 1])]))))

        if dmrg_type == 'dmrg':
            perm_1 = np.concatenate((perm_1, perm_1.size + perm_1b))

            perm_2 = []
            if alpha != tree.root and gamma != tree.root:
                perm_2 = [fH.tensors[alpha - 1].order +
                          fH.tensors[gamma - 1].order - 2]

            perm_3 = []
            if alpha != tree.root and self.tensor.ranks[tree.root - 1] > 1:
                perm_3 = [grad.order - 1]

            grad = grad.transpose(
                np.concatenate(
                    ([0], perm_1 + 1, perm_2, perm_3)).astype(int))
        elif dmrg_type == 'dmrg_low_rank':
            g_alpha = g_alpha.transpose(np.concatenate(([0], perm_1 + 1)))

            perm_2 = []
            if gamma != tree.root:
                # TODO Checks
                perm_2 = [fH.tensors[gamma - 1].order - 1]

            perm_3 = []
            if alpha != tree.root and self.tensor.ranks[tree.root - 1] > 1:
                perm_3 = [grad.order - 1]

            g_gamma = g_gamma.transpose(
                np.concatenate(
                    ([0], perm_1b + 1, perm_2, perm_3)).astype(int))
            grad = [g_alpha, g_gamma]
        else:
            raise ValueError('Wrong DMRG type.')
    return grad
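
# A minimal usage sketch for parameter_gradient_eval_dmrg, kept as a comment
# block. The setup is hypothetical: it assumes the usual tensap constructors
# (DimensionTree.balanced, TreeBasedTensor.rand with scalar ranks and shape,
# FunctionalBases.duplicate) and illustrative sizes; it is not the prescribed
# way to build a FunctionalTensor.
#
#     import numpy as np
#     import tensap
#
#     tree = tensap.DimensionTree.balanced(4)
#     tensor = tensap.TreeBasedTensor.rand(tree, 3, 5)  # ranks 3, shape 5
#     basis = tensap.PolynomialFunctionalBasis(tensap.LegendrePolynomials(),
#                                              range(5))
#     f = tensap.FunctionalTensor(tensor,
#                                 tensap.FunctionalBases.duplicate(basis, 4))
#
#     x = np.random.uniform(-1, 1, (100, 4))
#     # Gradient with respect to the pair (alpha, parent(alpha)) at the 100
#     # points; with dmrg_type='dmrg_low_rank', a list [g_alpha, g_gamma]
#     # is returned instead.
#     grad = f.parameter_gradient_eval_dmrg(2, x)
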
def hsvd(self, tensor, tree=None, is_active_node=None):
    '''
    Compute the truncated svd in tree-based tensor format of tensor.

    Parameters
    ----------
    tensor : tensap.FullTensor or tensap.TreeBasedTensor
        The tensor to truncate.
    tree : tensap.DimensionTree, optional
        The tree of the output tree-based tensor. The default is None,
        in which case tensor.tree is used if tensor is a
        tensap.TreeBasedTensor.
    is_active_node : numpy.ndarray, optional
        Logical array indicating if the nodes are active. The default is
        None, in which case tensor.is_active_node is used if tensor is a
        tensap.TreeBasedTensor.

    Raises
    ------
    ValueError
        If a wrong value of the attribute _hsvd_type is provided.
    NotImplementedError
        If the method is not implemented for the format.

    Returns
    -------
    out : tensap.TreeBasedTensor
        The truncated tensor in tree-based tensor format.

    '''
    if isinstance(tensor, tensap.TreeBasedTensor):
        if tree is not None or is_active_node is not None:
            warnings.warn('The provided tree and/or is_active_node '
                          'are not taken into account when x is a '
                          'tensap.TreeBasedTensor.')
        is_active_node = tensor.is_active_node
        tree = tensor.tree
    elif is_active_node is None:
        is_active_node = np.full(tree.nb_nodes, True)

    max_rank = np.atleast_1d(self.max_rank)
    if max_rank.size == 1:
        max_rank = np.repeat(max_rank, tree.nb_nodes)
        max_rank[tree.root-1] = 1

    local_tol = self.tolerance / np.sqrt(
        np.count_nonzero(is_active_node)-1)

    if isinstance(tensor, tensap.FullTensor):
        root_rank_greater_than_one = tensor.order == len(tree.dim2ind)+1

        tensors = np.empty(tree.nb_nodes, dtype=object)
        shape = np.array(tensor.shape)
        nodes_x = tree.dim2ind
        ranks = np.ones(tree.nb_nodes, dtype=int)

        for level in np.arange(np.max(tree.level), 0, -1):
            for nod in tree.nodes_with_level(level):
                if is_active_node[nod-1]:
                    if tree.is_leaf[nod-1]:
                        rep = np.nonzero(nod == nodes_x)[0][0]
                    else:
                        children = tree.children(nod)
                        rep = [np.nonzero(np.isin(nodes_x, x))[0][0]
                               for x in children]
                    rep_c = tensap.fast_setdiff(np.arange(nodes_x.size),
                                                rep)

                    if root_rank_greater_than_one:
                        rep_c = np.concatenate((rep_c, [tensor.order-1]))

                    self.max_rank = max_rank[nod-1]
                    tmp = self.trunc_svd(tensor.matricize(rep).numpy(),
                                         local_tol)
                    tensors[nod-1] = tmp.space[0]
                    ranks[nod-1] = tensors[nod-1].shape[1]
                    shape_loc = np.hstack((shape[rep], ranks[nod-1]))
                    tensors[nod-1] = tensap.FullTensor(tensors[nod-1],
                                                       shape=shape_loc)

                    tmp = np.matmul(tmp.space[1], np.diag(tmp.core.data))
                    shape = np.hstack((shape[rep_c], ranks[nod-1]))
                    tensor = tensap.FullTensor(tmp, shape=shape)

                    if root_rank_greater_than_one:
                        perm = np.concatenate((np.arange(tensor.order-2),
                                               [tensor.order-1],
                                               [tensor.order-2]))
                        tensor = tensor.transpose(perm)
                        shape = shape[perm]
                        rep_c = rep_c[:-1]

                    nodes_x = np.hstack((nodes_x[rep_c], nod))
                else:
                    tensors[nod-1] = []

        root_ch = tree.children(tree.root)
        rep = [np.nonzero(np.isin(nodes_x, x))[0][0] for x in root_ch]
        if root_rank_greater_than_one:
            rep = np.concatenate((rep, [tensor.order-1]))
        tensors[tree.root-1] = tensor.transpose(rep)
        out = tensap.TreeBasedTensor(tensors, tree)
    elif isinstance(tensor, tensap.TreeBasedTensor):
        if self._hsvd_type == 1:
            out = tensor.orth()
            gram = out.gramians()[0]
            mat = np.empty(gram.shape, dtype=object)
            shape = np.zeros(gram.shape)
            for nod in range(gram.size):
                # Truncation of the Gramian in trace norm for a control
                # of the Frobenius norm of the tensor.
                if gram[nod] is not None:
                    self.max_rank = max_rank[nod]
                    tmp = self.trunc_svd(gram[nod], local_tol ** 2)
                    shape[nod] = tmp.core.shape[0]
                    mat[nod] = np.transpose(tmp.space[0])

            # Interior nodes without the root
            for level in np.arange(1, np.max(tree.level)):
                nod_level = tensap.fast_setdiff(
                    tree.nodes_with_level(level),
                    np.nonzero(tree.is_leaf)[0]+1)
                for nod in tree.nodes_indices[nod_level-1]:
                    order = out.tensors[nod-1].order
                    out.tensors[nod-1] = \
                        out.tensors[nod-1].tensor_matrix_product(
                            mat[nod-1], order-1)
                    parent = tree.parent(nod)
                    ch_nb = tree.child_number(nod)
                    out.tensors[parent-1] = \
                        out.tensors[parent-1].tensor_matrix_product(
                            mat[nod-1], ch_nb-1)

            # Leaves
            for nod in tree.dim2ind:
                if out.is_active_node[nod-1]:
                    order = out.tensors[nod-1].order
                    out.tensors[nod-1] = \
                        out.tensors[nod-1].tensor_matrix_product(
                            mat[nod-1], order-1)
                    parent = tree.parent(nod)
                    ch_nb = tree.child_number(nod)
                    out.tensors[parent-1] = \
                        out.tensors[parent-1].tensor_matrix_product(
                            mat[nod-1], ch_nb-1)

            # Update the shape
            out = out.update_attributes()
            out.is_orth = False
        elif self._hsvd_type == 2:
            out = tensor.orth()
            gram = out.gramians()[0]
            for level in np.arange(np.max(tree.level), 0, -1):
                for nod in tensap.fast_intersect(
                        tree.nodes_with_level(level), out.active_nodes):
                    # Truncation of the Gramian in trace norm for a
                    # control of the Frobenius norm of the tensor.
                    self.max_rank = max_rank[nod-1]
                    tmp = self.trunc_svd(gram[nod-1], local_tol ** 2)
                    tmp = np.transpose(tmp.space[0])
                    order = out.tensors[nod-1].order
                    out.tensors[nod-1] = \
                        out.tensors[nod-1].tensor_matrix_product(
                            tmp, order-1)
                    parent = tree.parent(nod)
                    ch_nb = tree.child_number(nod)
                    out.tensors[parent-1] = out.tensors[parent-1].\
                        tensor_matrix_product(tmp, ch_nb-1)
            out = out.update_attributes()
            out.is_orth = True
            out.orth_node = tree.root
        else:
            raise ValueError('Wrong value of _hsvd_type.')
    else:
        raise NotImplementedError('Method not implemented.')
    return out
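
# A minimal usage sketch for hsvd, kept as a comment block. It assumes a
# Truncator constructed with a tolerance and a maximal rank, as in the tensap
# tutorials; the shapes and tolerance below are illustrative.
#
#     import numpy as np
#     import tensap
#
#     truncator = tensap.Truncator(tolerance=1e-8, max_rank=10)
#     tree = tensap.DimensionTree.balanced(4)
#     full = tensap.FullTensor(np.random.rand(5, 5, 5, 5))
#     # Truncation of a full tensor into tree-based format: a leaf-to-root
#     # sequence of truncated SVDs of matricizations is performed.
#     out = truncator.hsvd(full, tree)
#     # For a tensap.TreeBasedTensor input, the tree and is_active_node
#     # arguments are ignored and the Gramian-based truncation is used.
#     out2 = truncator.hsvd(out)
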
def parameter_gradient_eval(self, alpha, x=None, *args):
    '''
    Compute the gradient of the function with respect to its alpha-th
    parameter, evaluated at some points.

    Parameters
    ----------
    alpha : int
        The number of the parameter with respect to which the gradient
        of self is computed.
    x : list or numpy.ndarray, optional
        The points at which the gradient is to be evaluated. The default
        is None, indicating to use self.bases if self.evaluated_bases is
        True.

    Raises
    ------
    ValueError
        If no input points are provided.

    Returns
    -------
    grad : Tensor
        The gradient of the function with respect to its alpha-th
        parameter, evaluated at some points.

    '''
    if self.evaluated_bases:
        bases_eval = self.bases
    elif x is not None:
        bases_eval = self.bases.eval(x)
    else:
        raise ValueError('Must provide the evaluation points or the ' +
                         'bases evaluations.')

    dims = np.arange(self.tensor.order)
    if isinstance(self.tensor, tensap.TreeBasedTensor):
        # Compute fH, the TimesMatrixEvalDiag of f with bases_eval in all
        # the dimensions except the ones associated with alpha (if alpha
        # is a leaf node) or with the inactive children of alpha (if
        # alpha is an internal node). The tensor fH is used to compute
        # the gradient of f with respect to f.tensor.tensors[alpha-1].
        tree = self.tensor.tree
        if tree.is_leaf[alpha - 1]:
            dims = dims[tree.dim2ind != alpha]
        else:
            children = tree.children(alpha)
            ind = tensap.fast_intersect(
                tree.dim2ind,
                children[np.logical_not(
                    self.tensor.is_active_node[children - 1])])
            dims = dims[np.logical_not(np.isin(tree.dim2ind, ind))]

        if np.all(self.tensor.is_active_node):
            fH = self.tensor.tensor_matrix_product(
                [bases_eval[x] for x in dims], dims)
        else:
            remaining_dims = np.arange(self.tensor.order)
            tensors = np.array(self.tensor.tensors)
            dim2ind = np.array(tree.dim2ind)

            for leaf in tensap.fast_intersect(tree.dim2ind[dims],
                                              self.tensor.active_nodes):
                dims = tensap.fast_setdiff(
                    dims, np.nonzero(tree.dim2ind == leaf)[0][0])
                tensors[leaf-1] = self.tensor.tensors[leaf-1].\
                    tensor_matrix_product(bases_eval[
                        np.nonzero(tree.dim2ind == leaf)[0][0]], 0)

            for pa in np.unique(
                    tree.parent(
                        tensap.fast_setdiff(tree.dim2ind[dims],
                                            self.tensor.active_nodes))):
                ind = tensap.fast_intersect(tree.dim2ind[dims],
                                            tree.children(pa))
                ind = tensap.fast_setdiff(ind, self.tensor.active_nodes)
                dims_loc = np.array(
                    [np.nonzero(x == tree.dim2ind)[0][0] for x in ind])
                if len(ind) > 1:
                    tensors[pa-1] = self.tensor.tensors[pa-1].\
                        tensor_matrix_product_eval_diag(
                            [bases_eval[x] for x in dims_loc],
                            tree.child_number(ind)-1)
                    remaining_dims = tensap.fast_setdiff(remaining_dims,
                                                         dims_loc[1:])
                    if np.all(
                            np.logical_not(self.tensor.is_active_node[
                                tree.children(pa) - 1])):
                        dim2ind[dims_loc[0]] = tree.parent(
                            tree.dim2ind[dims_loc[0]])
                    else:
                        dims = tensap.fast_setdiff(dims, dims_loc[0])
                    dim2ind[dims_loc[1:]] = 0
                    perm = np.concatenate(
                        ([tree.child_number(ind[0]) - 1],
                         tensap.fast_setdiff(
                             np.arange(tensors[pa - 1].order),
                             tree.child_number(ind[0]) - 1)))
                    tensors[pa - 1] = tensors[pa - 1].itranspose(perm)
                elif len(ind) == 1:
                    dims = dims[dims != dims_loc]
                    tensors[pa-1] = self.tensor.tensors[pa-1].\
                        tensor_matrix_product(
                            [bases_eval[x] for x in dims_loc],
                            tree.child_number(ind)-1)
                    dim2ind[dims_loc] = tree.dim2ind[dims_loc]

            keep_ind = tensap.fast_setdiff(np.arange(tree.nb_nodes),
                                           tree.dim2ind[dims] - 1)
            adj_mat = tree.adjacency_matrix[np.ix_(keep_ind, keep_ind)]
            dim2ind = dim2ind[dim2ind != 0]
            ind = np.zeros(tree.nb_nodes)
            ind[tensap.fast_setdiff(np.arange(tree.nb_nodes),
                                    keep_ind)] = 1
            ind = np.cumsum(ind).astype(int)
            dim2ind -= ind[dim2ind - 1]
            alpha = alpha - ind[alpha - 1]

            tree = tensap.DimensionTree(dim2ind, adj_mat)
            fH = tensap.TreeBasedTensor(tensors[keep_ind], tree)
            fH = fH.remove_unique_children()

            bases_eval = [bases_eval[x] for x in remaining_dims]
    else:
        if alpha <= self.tensor.order:
            dims = np.delete(dims, alpha - 1)
        fH = self.tensor.tensor_matrix_product(
            [bases_eval[x] for x in dims], dims)

    grad = fH.parameter_gradient_eval_diag(alpha, bases_eval)

    if isinstance(self.tensor, tensap.TreeBasedTensor) and \
            not tree.is_leaf[alpha-1]:
        # If the order of the children has been modified in grad, compute
        # the inverse permutation.
        ch = tree.children(alpha)
        perm_1 = np.argsort(
            np.concatenate((np.atleast_1d(ch[fH.is_active_node[ch - 1]]),
                            np.atleast_1d(ch[np.logical_not(
                                fH.is_active_node[ch - 1])]))))
        if alpha == tree.root:
            perm_2 = []
        else:
            perm_2 = [fH.tensors[alpha - 1].order]
        if alpha != tree.root and self.tensor.ranks[tree.root - 1] > 1:
            perm_3 = [grad.order - 1]
        else:
            perm_3 = []
        grad = grad.transpose(
            np.concatenate(([0], perm_1 + 1, perm_2, perm_3)).astype(int))
    return grad
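
# A minimal usage sketch for parameter_gradient_eval, kept as a comment
# block; the construction of f is hypothetical and mirrors the DMRG example
# above.
#
#     import numpy as np
#     import tensap
#
#     tree = tensap.DimensionTree.balanced(4)
#     tensor = tensap.TreeBasedTensor.rand(tree, 3, 5)  # ranks 3, shape 5
#     basis = tensap.PolynomialFunctionalBasis(tensap.LegendrePolynomials(),
#                                              range(5))
#     f = tensap.FunctionalTensor(tensor,
#                                 tensap.FunctionalBases.duplicate(basis, 4))
#
#     x = np.random.uniform(-1, 1, (100, 4))
#     # Gradient of f with respect to the parameter of the first leaf node,
#     # evaluated at the 100 points; a single evaluation per point thanks to
#     # parameter_gradient_eval_diag.
#     alpha = int(tree.dim2ind[0])
#     grad = f.parameter_gradient_eval(alpha, x)
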