Example #1
    def _sp_op(self, i, mat, h_list, mod_term, err=1.e-6):
        if not h_list:
            return np.zeros(mat.shape)

        logging.debug(__('> OP on mat {}...', i))

        n, m = mat.shape
        partial_transform = self._partial_transform
        a = self.get_sub_vec(-1)
        a_h = np.conj(a)
        density = self._partial_product(i, a, a_h)
        # Regularized inverse of the reduced density matrix
        inv_density = linalg.inv(density + np.identity(m) * err)
        sp = self.get_sub_vec(i)
        sp_h = np.conj(np.transpose(sp))
        projection = np.identity(n) - np.dot(sp, sp_h)  # 1 - |sp><sp|

        tmp = partial_transform(i, a, mat)
        for mat_j in h_list:
            tmp = partial_transform(i, tmp, mat_j)
        for j, mat_j in mod_term:
            if j != i:
                tmp = partial_transform(j, tmp, mat_j)
        tmp = self._partial_product(i, tmp, a_h)
        ans = np.dot(projection, np.dot(tmp, inv_density))

        return ans
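
The complementary projector and the regularized inverse density above are the two standard ingredients of the MCTDH single-particle equations of motion. A minimal self-contained sketch of both objects, assuming an orthonormal configuration matrix `sp` (the names mirror the method; everything else is illustrative):

    import numpy as np
    from scipy import linalg

    n, m, err = 6, 3, 1.e-6
    rng = np.random.default_rng(0)
    # Orthonormal single-particle configurations (columns of sp)
    sp, _ = linalg.qr(rng.normal(size=(n, m)), mode='economic')
    density = rng.normal(size=(m, m))
    density = density @ density.T                             # possibly near-singular
    inv_density = linalg.inv(density + np.identity(m) * err)  # regularized inverse
    projection = np.identity(n) - sp @ np.conj(sp.T)          # 1 - |sp><sp|
    assert np.allclose(projection @ sp, 0.)                   # annihilates the occupied space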
Example #2
 def autocorr(self,
              steps=None,
              ode_inter=0.01,
              split=False,
              imaginary=False,
              fast=False,
              start=0,
              move_energy=False):
     if not fast:
         _init = {}
         for t in self.root.visitor(leaf=False):
             _init[t] = t.array
     for time, r in self.propagator(steps=steps,
                                    ode_inter=ode_inter,
                                    split=split,
                                    imaginary=imaginary,
                                    start=start,
                                    move_energy=move_energy):
         for t in r.visitor(leaf=False):
             # fast: aux = array without conj, so <psi(t)*|psi(t)> = C(2t)
             # for a real initial state; hence the doubled time below.
             t.aux = t.array if fast else np.conj(_init[t])
         auto = r.global_inner_product()
         ans = (2. * time, auto) if fast else (time, auto)
         yield ans
         for t in r.visitor(leaf=False):
             t.aux = None
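
The `fast` branch is the standard half-time trick: for a real initial state (and a symmetric Hamiltonian), contracting the state with itself without conjugation gives <psi(t)*|psi(t)> = C(2t), so propagating to `t` yields the autocorrelation at `2t`. A sketch of the identity on a plain vector, with `scipy.linalg.expm` standing in for the propagator:

    import numpy as np
    from scipy.linalg import expm

    h = np.array([[0., 1.], [1., 0.]])   # symmetric Hamiltonian
    psi0 = np.array([1., 0.])            # real initial state
    t = 0.3
    psi_t = expm(-1j * h * t) @ psi0
    slow = np.vdot(psi0, expm(-1j * h * (2 * t)) @ psi0)  # C(2t) directly
    fast = np.dot(psi_t, psi_t)                           # no conjugation
    assert np.allclose(slow, fast)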
Example #3
 def hilbert_angle(r1, r2):
     r"""Return :math:`\frac{<2|1>}{\sqrt{<1|1><2|2>}}.
     """
     for n1, n2 in zip(r1.visitor(leaf=False), r2.visitor(leaf=False)):
         n1.aux = np.conj(n2.array)
     cos = r1.global_inner_product() / (r1.global_norm() * r2.global_norm())
     return np.arccos(cos) / np.pi * 180.
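
For plain vectors the same angle is a one-liner; a sketch of the equivalent computation (the clip guards against round-off pushing the cosine outside [-1, 1], which the tree version above does not do):

    import numpy as np

    def vector_angle(v1, v2):
        """Angle between two state vectors, in degrees."""
        cos = np.vdot(v2, v1).real / (np.linalg.norm(v1) * np.linalg.norm(v2))
        return np.degrees(np.arccos(np.clip(cos, -1., 1.)))

    print(vector_angle(np.array([1., 0.]), np.array([1., 1.])))  # 45.0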
Example #4
 def normalize(self, forced=False):
     """Normalize the array of self. Only work when self.normalized.
     Set `forced` to `True` to normalize any way.
     """
     array = self.array
     if array is None or (not self.normalized and not forced):
         return
     axis = self.axis
     if axis is None:
         norm = np.array(self.local_norm())
         self.set_array(array / norm)
         ans = norm
     else:
         norm = linalg.norm
         shape = self.shape
         dim = shape.pop(axis)
         array = np.reshape(np.moveaxis(array, axis, 0), (dim, -1))
         vecs = []
         norm_list = []
         for vec_i in array:
             for vec_j in vecs:
                 # Gram-Schmidt: remove the component along vec_j
                 vec_i -= vec_j * np.dot(np.conj(vec_j), vec_i)
             norm_ = norm(vec_i)
             vecs.append(vec_i / norm_)
             norm_list.append(norm_)
         array = np.array(vecs)
         array = np.moveaxis(np.reshape(array, [-1] + shape), 0, axis)
         self.set_array(array)
         ans = norm_list
     return ans
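
The `else` branch is a classical Gram-Schmidt pass over the fibers of one tensor axis. The same orthonormalization on a plain matrix, each row treated as one vector (a standalone sketch, not the class method):

    import numpy as np

    def gram_schmidt_rows(mat):
        """Orthonormalize the rows of mat; return (q, norms)."""
        vecs, norms = [], []
        for vec in np.array(mat, dtype=complex):
            for q in vecs:
                vec -= q * np.dot(np.conj(q), vec)  # remove the component along q
            n = np.linalg.norm(vec)
            vecs.append(vec / n)
            norms.append(n)
        return np.array(vecs), norms

    q, _ = gram_schmidt_rows([[1., 1.], [1., 0.]])
    assert np.allclose(q @ np.conj(q.T), np.identity(2))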
Example #5
 def _extend_space(self):
     head = len(self._search_space)
     self._search_space += self._trial_vecs
     self._column_space += list(map(self._matvec, self._trial_vecs))
     tail = len(self._search_space)
     v, a_v = self._search_space, self._column_space
      # Fill only the new rows and columns of the Hermitian matrix <v_i|A v_j>
      for i in range(head):
         for j in range(head, tail):
             self._submatrix[i, j] = np.dot(np.conj(v[i]), a_v[j])
             self._submatrix[j, i] = np.conj(self._submatrix[i, j])
     for i in range(head, tail):
         for j in range(head, i):
             self._submatrix[i, j] = np.dot(np.conj(v[i]), a_v[j])
             self._submatrix[j, i] = np.conj(self._submatrix[i, j])
         self._submatrix[i, i] = np.dot(np.conj(v[i]), a_v[i])
     return self._submatrix[:tail, :tail]
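
Each entry of the subspace matrix is <v_i|A v_j>; the loops above fill only the block that is new since the last extension and mirror it by Hermiticity. Computed from scratch instead (an illustrative sketch):

    import numpy as np

    def subspace_matrix(search_space, column_space):
        """H[i, j] = <v_i | A v_j> for lists of vectors v and A v."""
        v = np.array(search_space)
        a_v = np.array(column_space)
        return np.conj(v) @ a_v.T  # Hermitian whenever A is

    # e.g. with a Hermitian matrix a: subspace_matrix(v, [a @ x for x in v])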
Example #6
    def _calc_trial_vecs(self):
        if self._precondition is None:
            dim = len(self._search_space[0])
            self._precondition = self.davidson_precondition(
                dim, self._matvec
            )

        self._trial_vecs = []
        precondition = self._precondition
        zipped = zip(
            self._residuals, self._residual_norms,
            self._get_ritz_vecs(), self._convergence
        )
        for residual, norm_, ritz_vec, conv in zipped:
            # remove linear dependency in self._residuals
            if norm_ ** 2 > self.lin_dep_lim and not conv:
                vec = precondition(
                    residual, self._ritz_vals[0], ritz_vec
                )
                vec *= 1. / norm(vec)
                for base in self._search_space:
                    vec -= np.dot(np.conj(base), vec) * base
                norm_ = norm(vec)
                # remove linear dependency between trial_vecs and
                # self._search_space
                if norm_ ** 2 > self.lin_dep_lim:
                    vec *= 1. / norm_
                    self._trial_vecs.append(vec)
        return self._trial_vecs
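
`davidson_precondition` is commonly the diagonal (Jacobi) preconditioner, which divides the residual by `diag(A) - theta`; the actual callable in this class may differ. A sketch under that assumption:

    import numpy as np

    def diagonal_precondition(residual, ritz_val, diag, tol=1.e-12):
        """Jacobi preconditioner: r_i / (A_ii - theta)."""
        denom = diag - ritz_val
        denom[np.abs(denom) < tol] = tol  # guard against division by ~0
        return residual / denom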
Example #7
 def global_square(self):
     """Return <array|array>
     """
     for t in self.visitor(leaf=False):
         t.aux = np.conj(t.array)
     ans = self.global_inner_product()
     return ans
Example #8
 def energy_expection(self, vec=None):
     if vec is None:
         vec = self.vec
     a_tensor = self.get_sub_vec(-1, vec)
     mod_terms = self.update_mod_terms(vec=vec, write=False)
     a_h = np.conj(np.reshape(a_tensor, -1))
     ans = 0.
     for term in mod_terms:
         h_a = np.reshape(self._coeff_op(a_tensor, term), -1)
         ans += np.dot(a_h, h_a)
     return ans
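
Flattening reduces the expectation value of each term to a single dot product. The same pattern with a dense Hamiltonian matrix (a sketch, not the per-term `_coeff_op` machinery above):

    import numpy as np

    def expectation(h, a_tensor):
        """<a|H|a> with a_tensor flattened to a vector."""
        a = np.reshape(a_tensor, -1)
        return np.dot(np.conj(a), h @ a)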
Example #9
 def log_inner_product(self, level=logging.DEBUG):
     root = self.root
     if logging.root.isEnabledFor(level):
         shape_dict = {}
         init = root.vectorize(shape_dict=shape_dict)
     try:
         yield self  # presumably run under @contextmanager upstream
     except Exception:
         pass  # swallow errors from the wrapped block and skip logging
     else:
         if logging.root.isEnabledFor(level):
             root.tensorize(np.conj(init), use_aux=True, shape_dict=shape_dict)
             ip = root.global_inner_product()
             logging.log(level, __("<|>:{}", ip))
Example #10
    def _diff_k(self, k):
        c_k = self.corr.symm_coeff[k] + 1.0j * self.corr.asymm_coeff[k]
        numberer = self._numberer(k)
        raiser = self._raiser(k)
        lower = self._lower(k)

        return [
            [(self._i, -1.0j / self.hbar * np.transpose(self.op)), (k, lower)],
            [(self._j, 1.0j / self.hbar * self.op), (k, lower)],
            [(self._i, -1.0j / self.hbar * c_k * np.transpose(self.op)),
             (k, raiser @ numberer)],
            [(self._j, 1.0j / self.hbar * np.conj(c_k) * self.op),
             (k, raiser @ numberer)],
        ]
Example #11
    def _diff_k(self, k):
        c_k = self.corr.symm_coeff[k] + 1.0j * self.corr.asymm_coeff[k]
        logging.debug(__('k: {}; c_k: {}', k, c_k))
        numberer = self._sqrt_numberer(k)
        raiser = self._raiser(k)
        lower = self._lower(k)

        return [
            [(self._i, -1.0j / self.hbar * self.op), (k, numberer @ lower)],
            [(self._j, 1.0j / self.hbar * self.op), (k, numberer @ lower)],
            [(self._i, -1.0j / self.hbar * c_k * self.op),
             (k, raiser @ numberer)],
            [(self._j, 1.0j / self.hbar * np.conj(c_k) * self.op),
             (k, raiser @ numberer)],
        ]
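
Both variants of `_diff_k` combine the number, raising and lowering operators of one truncated bosonic mode. A standard dense construction of those matrices (a sketch; the class builds them through `_raiser`, `_lower` and `_numberer`, whose conventions may differ):

    import numpy as np

    def ladder_ops(dim):
        """Raising, lowering and number operators on a dim-level mode."""
        s = np.sqrt(np.arange(1., dim))
        raiser = np.diag(s, -1)             # a+ |n> = sqrt(n + 1) |n + 1>
        lower = np.diag(s, 1)               # a  |n> = sqrt(n) |n - 1>
        numberer = np.diag(np.arange(dim))  # a+ a |n> = n |n>
        return raiser, lower, numberer

    raiser, lower, numberer = ladder_ops(4)
    assert np.allclose(raiser @ lower, numberer)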
Example #12
    def _single_eom(self, tensor, n, cache=False):
        """C.f. `Multi-Configuration Time Dependent Hartree Theory: a Tensor
        Network Perspective`, p38. This method does not contain the `i hbar`
        coefficient.

        Parameters
        ----------
        tensor : Tensor
            Must in a graph with all nodes' array set, including the leaves.
        n : int
            No. of Hamiltonian term.

        Return:
        -------
        array : ndarray
            With the same shape with tensor.shape.
        """
        partial_product = Tensor.partial_product
        partial_trace = Tensor.partial_trace
        partial_env = tensor.partial_env

        # Env Hamiltonians
        tmp = tensor.array
        for i in range(tensor.order):
            try:
                env_ = self.env_[(n, tensor, i)]
            except KeyError:
                env_ = partial_env(i, proper=True)
                if cache:
                    self.env_[(n, tensor, i)] = env_
            tmp = partial_product(tmp, i, env_)
        # For non-root nodes...
        if tensor.axis is not None:
            # Inversion
            axis, inv = tensor.axis, self.inv_density[tensor]
            tmp = partial_product(tmp, axis, inv)
            # Projection
            tmp_1 = np.array(tmp)
            array = tensor.array
            conj_array = np.conj(array)
            tmp = partial_trace(tmp, axis, conj_array, axis)
            tmp = partial_product(array, axis, tmp, j=1)
            tmp = (tmp_1 - tmp)
        return tmp
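
`partial_product` contracts a matrix into one axis of a tensor. In plain NumPy the primitive is a `tensordot` followed by `moveaxis` (a sketch contracting the first index of the matrix; the library version also accepts a `j` argument selecting which index to contract):

    import numpy as np

    def partial_product(tensor, i, mat):
        """Contract mat into axis i: T[..., a, ...] M[a, b] -> T[..., b, ...]."""
        ans = np.tensordot(tensor, mat, axes=([i], [0]))
        return np.moveaxis(ans, -1, i)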
Example #13
    def partial_env(self, i, proper=False, use_aux=False):
        """
        Parameters
        ----------
        i : {int, None}
        proper : bool
        use_aux : bool
            Whether to use self.aux as conj.

        Returns
        -------
        ans : {2-d ndarray, None}
        """
        if proper:
            # Delegate the proper environment to the neighbor across axis i;
            # this also makes the method work at a Leaf.
            child, j = self._access[i]
            return child.partial_env(j, proper=False, use_aux=use_aux)

        else:
            partial_product = Tensor.partial_product
            partial_trace = Tensor.partial_trace
            # Check the cache
            if i in self._partial_env and not use_aux:
                return self._partial_env[i]
            # Main algorithm
            else:
                env_ = [(i_, tensor.partial_env(j, proper=False, use_aux=use_aux))
                        for i_, tensor, j in self.children(axis=i)]  # Recursively
                # Make use of the normalization condition
                if not use_aux and i == self.axis and self.normalized and (all(args[1] is None for args in env_)):
                    ans = None
                else:
                    temp = self.array
                    for i_, matrix in env_:
                        temp = partial_product(temp, i_, matrix)
                    conj = self.aux if use_aux else np.conj(self.array)
                    ans = partial_trace(temp, i, conj, i)
                # Cache the answer and return
                if i is not None:
                    if not use_aux:
                        self._partial_env[i] = ans
                else:
                    ans = ans if ans is not None else 1.
                return ans
Example #14
    def projector(self, comp=False):
        """[Deprecated] Return the projector corresponding to self.

        Returns
        -------
        ans : ndarray
        """
        axis = self.axis
        if axis is not None:
            array = self.array
            shape = self.shape
            dim = shape.pop(axis)
            comp_dim = np.prod(shape)
            array = np.moveaxis(array, axis, -1)
            array = np.reshape(array, (-1, dim))
            array_h = np.conj(np.transpose(array))
            ans = np.dot(array, array_h)
            if comp:
                identity = np.identity(comp_dim)
                ans = identity - ans
            ans = np.reshape(ans, shape * 2)
            return ans
        else:
            raise RuntimeError('Need to specify the normalization axis!')
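
A quick numerical check of the construction: with orthonormal fibers along the axis, as `normalize` above guarantees, the result is idempotent and acts as the identity on its range.

    import numpy as np
    from scipy import linalg

    a, _ = linalg.qr(np.random.default_rng(1).normal(size=(5, 2)), mode='economic')
    p = a @ np.conj(a.T)          # projector onto span(a)
    assert np.allclose(p @ p, p)  # idempotent
    assert np.allclose(p @ a, a)  # identity on its range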
Example #15
 def _eval(op, vec):
     vec_h = np.conj(np.transpose(vec))
     mod_op = np.dot(vec_h, np.dot(op, vec))
     return mod_op
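
The pattern V^† O V transforms an operator into the subspace spanned by the columns of `vec` and preserves Hermiticity; a minimal check:

    import numpy as np

    op = np.array([[1., 2.], [2., 3.]])            # Hermitian operator
    vec = np.array([[1.], [1.]]) / np.sqrt(2.)     # one normalized basis column
    mod_op = np.conj(vec.T) @ op @ vec
    assert np.allclose(mod_op, np.conj(mod_op.T))  # still Hermitian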
Example #16
 def expection(self):
     """Return <array|H|array>
     """
     for t in self.visitor(leaf=False):
         t.aux = np.conj(t.array)
     return self.matrix_element()
Example #17
 def local_norm(self):
     self.aux = np.conj(self.array)
     ans = self.local_inner_product()
     return np.sqrt(ans)
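
For a bare ndarray the same norm is one `np.vdot`, which flattens its arguments and conjugates the first:

    import numpy as np

    def local_norm(array):
        """sqrt(<array|array>) for a complex ndarray of any shape."""
        return np.sqrt(np.vdot(array, array).real)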