def test_node_order_spec(backend):
    node = Node(np.ones((2, 2)), backend=backend)
    result = ncon_interface.ncon([node, node], [(-1, 1), (1, -2)],
                                 out_order=[-1, -2],
                                 backend=backend)

    np.testing.assert_allclose(result.tensor, np.ones((2, 2)) * 2)
    result = ncon_interface.ncon([node, node], [(-1, 1), (1, -2)],
                                 con_order=[1],
                                 backend=backend)

    np.testing.assert_allclose(result.tensor, np.ones((2, 2)) * 2)

    result = ncon_interface.ncon([node, node], [(-1, 1), (1, -2)],
                                 con_order=[1],
                                 out_order=[-1, -2],
                                 backend=backend)

    np.testing.assert_allclose(result.tensor, np.ones((2, 2)) * 2)
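In `ncon`, positive labels mark contracted indices and negative labels mark free output indices; `con_order` and `out_order` override the default orderings. A minimal sketch of the convention, assuming the same `ncon` version as the tests above (which accepts `Node` objects and returns a node-like result with a `.tensor` attribute):

import numpy as np
import tensornetwork as tn

a = tn.Node(np.random.rand(2, 3))
b = tn.Node(np.random.rand(3, 4))
# Label 1 appears twice, so those axes are contracted; -1 and -2 are the
# free output axes of the result, in that order.
result = tn.ncon([a, b], [(-1, 1), (1, -2)])
np.testing.assert_allclose(result.tensor, a.tensor @ b.tensor)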
Example #2
  def apply_one_site_gate(self, gate: Union[BaseNode, Tensor],
                          site: int) -> None:
    """Apply a one-site gate to an MPS. This routine will in general destroy
    any canonical form of the state. If a canonical form is needed, the user
    can restore it using `FiniteMPS.position`

    Args:
      gate: a one-body gate
      site: the site where the gate should be applied
    """
    if len(gate.shape) != 2:
      raise ValueError('rank of gate is {} but has to be 2'.format(
          len(gate.shape)))
    if site < 0 or site >= len(self):
      raise ValueError('site = {} is not between 0 <= site < N={}'.format(
          site, len(self)))
    gate_node = Node(gate, backend=self.backend)
    gate_node[1] ^ self.nodes[site][1]
    edge_order = [self.nodes[site][0], gate_node[0], self.nodes[site][2]]
    self.nodes[site] = contract_between(
        gate_node, self.nodes[site],
        name=self.nodes[site].name).reorder_edges(edge_order)
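A minimal usage sketch, assuming the top-level `tensornetwork` package exposes `FiniteMPS` with a `random` constructor as used below:

import numpy as np
import tensornetwork as tn

# A random 4-site MPS with physical dimension 2 and bond dimension 4.
mps = tn.FiniteMPS.random(d=[2, 2, 2, 2], D=[4, 4, 4], dtype=np.float64)

# Apply a Pauli-X gate to site 1; this may destroy the canonical form,
# which can be restored afterwards with FiniteMPS.position.
pauli_x = np.array([[0.0, 1.0], [1.0, 0.0]])
mps.apply_one_site_gate(pauli_x, site=1)
mps.position(0)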
Example #3
    def from_tensor(cls,
                    tensor: Tensor,
                    out_axes: Sequence[int],
                    in_axes: Sequence[int],
                    backend: Optional[Text] = None) -> "QuOperator":
        """Construct a `QuOperator` directly from a single tensor.

    This first wraps the tensor in a `Node`, then constructs the `QuOperator`
    from that `Node`.

    Args:
      tensor: The tensor.
      out_axes: The axis indices of `tensor` to use as `out_edges`.
      in_axes: The axis indices of `tensor` to use as `in_edges`.
      backend: Optionally specify the backend to use for computations.
    Returns:
      The new operator.
    """
        n = Node(tensor, backend=backend)
        out_edges = [n[i] for i in out_axes]
        in_edges = [n[i] for i in in_axes]
        return cls(out_edges, in_edges, set([n]))
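A brief sketch of how `from_tensor` might be called; the import path of `QuOperator` is an assumption and may differ between versions:

import numpy as np
from tensornetwork.quantum import QuOperator  # import path is an assumption

# Interpret a rank-4 tensor as a two-qubit operator: axes (0, 1) are the
# output (row) indices, axes (2, 3) the input (column) indices.
tensor = np.random.rand(2, 2, 2, 2)
op = QuOperator.from_tensor(tensor, out_axes=[0, 1], in_axes=[2, 3])
print(len(op.out_edges), len(op.in_edges))  # 2 2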
Example #4
def eliminate_identities(nodes: Collection[BaseNode]) -> Tuple[dict, dict]:
    """Eliminates any connected CopyNodes that are identity matrices.

  This will modify the network represented by `nodes`.
  Only identities that are connected to other nodes are eliminated.

  Args:
    nodes: Collection of nodes to search.
  Returns:
    nodes_dict: Dictionary mapping remaining Nodes to any replacements.
    dangling_edges_dict: Dictionary specifying all dangling-edge replacements.
  """
    nodes_dict = {}
    dangling_edges_dict = {}
    for n in nodes:
        if isinstance(n, CopyNode) and n.get_rank() == 2 and not (
                n[0].is_dangling() and n[1].is_dangling()):
            old_edges = [n[0], n[1]]
            _, new_edges = remove_node(n)
            if 0 in new_edges and 1 in new_edges:
                e = connect(new_edges[0], new_edges[1])
            elif 0 in new_edges:  # 1 was dangling
                dangling_edges_dict[old_edges[1]] = new_edges[0]
            elif 1 in new_edges:  # 0 was dangling
                dangling_edges_dict[old_edges[0]] = new_edges[1]
            else:
                # Trace of identity, so replace with a scalar node!
                d = n.get_dimension(0)
                # NOTE: Assume CopyNodes have numpy dtypes.
                nodes_dict[n] = Node(np.array(d, dtype=n.dtype),
                                     backend=n.backend)
        else:
            for e in n.get_all_dangling():
                dangling_edges_dict[e] = e
            nodes_dict[n] = n

    return nodes_dict, dangling_edges_dict
Example #5
    def from_tensor(cls,
                    tensor: Tensor,
                    subsystem_axes: Optional[Sequence[int]] = None,
                    backend: Optional[Text] = None) -> "QuAdjointVector":
        """Construct a `QuAdjointVector` directly from a single tensor.

    This first wraps the tensor in a `Node`, then constructs the
    `QuAdjointVector` from that `Node`.

    Args:
      tensor: The tensor.
      subsystem_axes: Sequence of integer indices specifying the order in which
        to interpret the axes as subsystems (input edges). If not specified,
        the axes are taken in ascending order.
      backend: Optionally specify the backend to use for computations.
    Returns:
      The new operator.
    """
        n = Node(tensor, backend=backend)
        if subsystem_axes is not None:
            subsystem_edges = [n[i] for i in subsystem_axes]
        else:
            subsystem_edges = n.get_all_edges()
        return cls(subsystem_edges)
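A short sketch in the same spirit; again the import path is an assumption:

import numpy as np
from tensornetwork.quantum import QuAdjointVector  # import path is an assumption

# Interpret a rank-2 tensor as the adjoint (bra) of a two-qubit state.
tensor = np.random.rand(2, 2)
bra = QuAdjointVector.from_tensor(tensor, subsystem_axes=[0, 1])
print(len(bra.subsystem_edges))  # 2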
def conj(node: BaseNode,
         name: Optional[Text] = None,
         axis_names: Optional[List[Text]] = None) -> BaseNode:
    """Conjugate `node`
  Args:
    node: A `BaseNode`. 
    name: Optional name to give the new node.
    axis_names: Optional list of names for the axis.
  Returns:
    A new node. The complex conjugate of `node`.
  Raises:
    TypeError: If `node` has no `backend` attribute.
  """
    if not hasattr(node, 'backend'):
        raise TypeError('Node {} of type {} has no `backend`'.format(
            node, type(node)))
    backend = node.backend
    if not axis_names:
        axis_names = node.axis_names

    return Node(backend.conj(node.tensor),
                name=name,
                axis_names=axis_names,
                backend=backend.name)
Example #7
def copy(nodes: Iterable[BaseNode],
         conjugate: bool = False) -> Tuple[dict, dict]:
  """Copy the given nodes and their edges.

  This will return a dictionary linking original nodes/edges 
  to their copies. If nodes A and B are connected but only A is passed in to be
  copied, the edge between them will become a dangling edge.

  Args:
    nodes: An `Iterable` (Usually a `List` or `Set`) of `Nodes`.
    conjugate: Boolean. Whether to conjugate all of the nodes
        (useful for calculating norms and reduced density
        matrices).

  Returns:
    A tuple containing:
      node_dict: A dictionary mapping the nodes to their copies.
      edge_dict: A dictionary mapping the edges to their copies.
  """
  #TODO: add support for copying CopyTensor
  if conjugate:
    node_dict = {
        node: Node(
            node.backend.conj(node.tensor),
            name=node.name,
            axis_names=node.axis_names,
            backend=node.backend) for node in nodes
    }
  else:
    node_dict = {
        node: Node(
            node.tensor,
            name=node.name,
            axis_names=node.axis_names,
            backend=node.backend) for node in nodes
    }
  edge_dict = {}
  for edge in get_all_edges(nodes):
    node1 = edge.node1
    axis1 = edge.node1.get_axis_number(edge.axis1)
    # edge dangling or node2 does not need to be copied
    if edge.is_dangling() or edge.node2 not in node_dict:
      new_edge = Edge(node_dict[node1], axis1, edge.name)
      node_dict[node1].add_edge(new_edge, axis1)
      edge_dict[edge] = new_edge
      continue

    node2 = edge.node2
    axis2 = edge.node2.get_axis_number(edge.axis2)
    # copy node2 but not node1
    if node1 not in node_dict:
      new_edge = Edge(node_dict[node2], axis2, edge.name)
      node_dict[node2].add_edge(new_edge, axis2)
      edge_dict[edge] = new_edge
      continue

    # both nodes should be copied
    new_edge = Edge(node_dict[node1], axis1, edge.name, node_dict[node2], axis2)
    new_edge.set_signature(edge.signature)
    node_dict[node2].add_edge(new_edge, axis2)
    node_dict[node1].add_edge(new_edge, axis1)
    edge_dict[edge] = new_edge

  return node_dict, edge_dict
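A minimal sketch of `copy` on a two-node network, assuming the function is exposed at the package level as `tn.copy`:

import numpy as np
import tensornetwork as tn

a = tn.Node(np.random.rand(2, 3), name='a')
b = tn.Node(np.random.rand(3, 2), name='b')
a[1] ^ b[0]

# Both nodes are copied, so the connecting edge is copied along with them.
node_dict, edge_dict = tn.copy([a, b])
result = node_dict[a] @ node_dict[b]
np.testing.assert_allclose(result.tensor, a.tensor @ b.tensor)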
Example #8
def split_node(
    node: BaseNode,
    left_edges: List[Edge],
    right_edges: List[Edge],
    max_singular_values: Optional[int] = None,
    max_truncation_err: Optional[float] = None,
    relative: Optional[bool] = False,
    left_name: Optional[Text] = None,
    right_name: Optional[Text] = None,
    edge_name: Optional[Text] = None,
) -> Tuple[BaseNode, BaseNode, Tensor]:
  """Split a `node` using Singular Value Decomposition.

  Let :math:`M` be the matrix created by flattening `left_edges` and 
  `right_edges` into 2 axes. 
  Let :math:`U S V^* = M` be the SVD of :math:`M`. 
  This will split the network into 2 nodes. 
  The left node's tensor will be :math:`U \\sqrt{S}` 
  and the right node's tensor will be
  :math:`\\sqrt{S} V^*` where :math:`V^*` is the adjoint of :math:`V`.

  The singular value decomposition is truncated if `max_singular_values` or
  `max_truncation_err` is not `None`.

  The truncation error is the 2-norm of the vector of truncated singular
  values. If only `max_truncation_err` is set, as many singular values will
  be truncated as possible while maintaining:
  `norm(truncated_singular_values) <= max_truncation_err`.
  If `relative` is set `True` then `max_truncation_err` is understood
  relative to the largest singular value.

  If only `max_singular_values` is set, the number of singular values kept
  will be `min(max_singular_values, number_of_singular_values)`, so that
  `max(0, number_of_singular_values - max_singular_values)` are truncated.

  If both `max_truncation_err` and `max_singular_values` are set,
  `max_singular_values` takes priority: The truncation error may be larger
  than `max_truncation_err` if required to satisfy `max_singular_values`.

  Args:
    node: The node you want to split.
    left_edges: The edges you want connected to the new left node.
    right_edges: The edges you want connected to the new right node.
    max_singular_values: The maximum number of singular values to keep.
    max_truncation_err: The maximum allowed truncation error.
    relative: Multiply `max_truncation_err` with the largest singular value.
    left_name: The name of the new left node. If `None`, a name will be 
      generated automatically.
    right_name: The name of the new right node. If `None`, a name will be
      generated automatically.
    edge_name: The name of the new `Edge` connecting the new left and
      right node. If `None`, a name will be generated automatically.
      The new axis will get the same name as the edge.

  Returns:
    A tuple containing:
      left_node:
        A new node created that connects to all of the `left_edges`.
        Its underlying tensor is :math:`U \\sqrt{S}`
      right_node:
        A new node created that connects to all of the `right_edges`.
        Its underlying tensor is :math:`\\sqrt{S} V^*`
      truncated_singular_values:
        The vector of truncated singular values.
  Raises:
    AttributeError: If `node` has no backend attribute
  """

  if not hasattr(node, 'backend'):
    raise AttributeError('Node {} of type {} has no `backend`'.format(
        node, type(node)))

  if node.axis_names and edge_name:
    left_axis_names = []
    right_axis_names = [edge_name]
    for edge in left_edges:
      left_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is node
                             else node.axis_names[edge.axis2])
    for edge in right_edges:
      right_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is node
                              else node.axis_names[edge.axis2])
    left_axis_names.append(edge_name)
  else:
    left_axis_names = None
    right_axis_names = None

  backend = node.backend
  transp_tensor = node.tensor_from_edge_order(left_edges + right_edges)

  u, s, vh, trun_vals = backend.svd_decomposition(
      transp_tensor,
      len(left_edges),
      max_singular_values,
      max_truncation_err,
      relative=relative)
  sqrt_s = backend.sqrt(s)
  u_s = backend.broadcast_right_multiplication(u, sqrt_s)
  vh_s = backend.broadcast_left_multiplication(sqrt_s, vh)

  left_node = Node(
      u_s, name=left_name, axis_names=left_axis_names, backend=backend)

  left_axes_order = [
      edge.axis1 if edge.node1 is node else edge.axis2 for edge in left_edges
  ]
  for i, edge in enumerate(left_edges):
    left_node.add_edge(edge, i)
    edge.update_axis(left_axes_order[i], node, i, left_node)

  right_node = Node(
      vh_s, name=right_name, axis_names=right_axis_names, backend=backend)

  right_axes_order = [
      edge.axis1 if edge.node1 is node else edge.axis2 for edge in right_edges
  ]
  for i, edge in enumerate(right_edges):
    # i + 1 to account for the new edge.
    right_node.add_edge(edge, i + 1)
    edge.update_axis(right_axes_order[i], node, i + 1, right_node)

  connect(left_node.edges[-1], right_node.edges[0], name=edge_name)
  node.fresh_edges(node.axis_names)
  return left_node, right_node, trun_vals
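A minimal usage sketch, assuming `split_node` is exported at the package level as `tn.split_node`:

import numpy as np
import tensornetwork as tn

node = tn.Node(np.random.rand(4, 5, 6))
# Axis 0 goes to the left node, axes 1 and 2 to the right node; keep at
# most 3 singular values.
left, right, trunc_svals = tn.split_node(
    node, left_edges=[node[0]], right_edges=[node[1], node[2]],
    max_singular_values=3)
print(left.tensor.shape, right.tensor.shape)  # (4, 3) (3, 5, 6)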
Example #9
def split_node_qr(
    node: BaseNode,
    left_edges: List[Edge],
    right_edges: List[Edge],
    left_name: Optional[Text] = None,
    right_name: Optional[Text] = None,
    edge_name: Optional[Text] = None,
) -> Tuple[BaseNode, BaseNode]:
  """Split a `node` using QR decomposition.

  Let :math:`M` be the matrix created by 
  flattening `left_edges` and `right_edges` into 2 axes. 
  Let :math:`QR = M` be the QR Decomposition of :math:`M`.
  This will split the network into 2 nodes.
  The `left node`'s tensor will be :math:`Q` (an orthonormal matrix)
  and the `right node`'s tensor will be :math:`R` (an upper triangular matrix)

  Args:
    node: The node you want to split.
    left_edges: The edges you want connected to the new left node.
    right_edges: The edges you want connected to the new right node.
    left_name: The name of the new left node. If `None`, a name will be
      generated automatically.
    right_name: The name of the new right node. If `None`, a name will be
      generated automatically.
    edge_name: The name of the new `Edge` connecting the new left and right
      node. If `None`, a name will be generated automatically.

  Returns:
    A tuple containing:
      left_node:
        A new node created that connects to all of the `left_edges`.
        Its underlying tensor is :math:`Q`
      right_node:
        A new node created that connects to all of the `right_edges`.
        Its underlying tensor is :math:`R`
  Raises:
    AttributeError: If `node` has no backend attribute
  """
  if not hasattr(node, 'backend'):
    raise AttributeError('Node {} of type {} has no `backend`'.format(
        node, type(node)))

  if node.axis_names and edge_name:
    left_axis_names = []
    right_axis_names = [edge_name]
    for edge in left_edges:
      left_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is node
                             else node.axis_names[edge.axis2])
    for edge in right_edges:
      right_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is node
                              else node.axis_names[edge.axis2])
    left_axis_names.append(edge_name)
  else:
    left_axis_names = None
    right_axis_names = None

  backend = node.backend
  transp_tensor = node.tensor_from_edge_order(left_edges + right_edges)

  q, r = backend.qr_decomposition(transp_tensor, len(left_edges))
  left_node = Node(
      q, name=left_name, axis_names=left_axis_names, backend=backend)

  left_axes_order = [
      edge.axis1 if edge.node1 is node else edge.axis2 for edge in left_edges
  ]
  for i, edge in enumerate(left_edges):
    left_node.add_edge(edge, i)
    edge.update_axis(left_axes_order[i], node, i, left_node)

  right_node = Node(
      r, name=right_name, axis_names=right_axis_names, backend=backend)

  right_axes_order = [
      edge.axis1 if edge.node1 is node else edge.axis2 for edge in right_edges
  ]
  for i, edge in enumerate(right_edges):
    # i + 1 to account for the new edge.
    right_node.add_edge(edge, i + 1)
    edge.update_axis(right_axes_order[i], node, i + 1, right_node)

  connect(left_node.edges[-1], right_node.edges[0], name=edge_name)
  node.fresh_edges(node.axis_names)

  return left_node, right_node
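A short usage sketch, assuming the function is exported as `tn.split_node_qr`:

import numpy as np
import tensornetwork as tn

node = tn.Node(np.random.rand(4, 5, 6))
q_node, r_node = tn.split_node_qr(
    node, left_edges=[node[0], node[1]], right_edges=[node[2]])
# Q keeps the left edges plus the new bond; R keeps the bond and the right edge.
print(q_node.tensor.shape, r_node.tensor.shape)  # (4, 5, 6) (6, 6)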
Example #10
    def measure_two_body_correlator(self, op1: Tensor, op2: Tensor, site1: int,
                                    sites2: Sequence[int]) -> np.ndarray:
        """
    Compute the correlator <op1, op2> between `site1` and all sites `s` in
    `sites2`. If `site1 == s`, `op2` will be applied first.
    Args:
      op1, op2: Tensors of rank 2; the local operators to be measured
      site1: the site where `op1`  acts
      sites2: sites where `op2` acts.
    Returns:
      List: correlator <op1, op2>
    Raises:
      ValueError if `site1` is out of range
    """
        N = len(self)
        if site1 < 0:
            raise ValueError(
                "Site site1 out of range: {} not between 0 <= site < N = {}.".
                format(site1, N))
        sites2 = np.array(sites2)  #enable logical indexing

        # we break the computation into two parts:
        # first we get all correlators <op2(site2) op1(site1)> with site2 < site1
        # then all correlators <op1(site1) op2(site2)> with site2 >= site1

        # get all sites smaller than site1
        left_sites = sorted(sites2[sites2 < site1])
        # get all sites larger than site1
        right_sites = sorted(sites2[sites2 > site1])

        # compute all necessary left and right reduced
        # density matrices in one go. This is
        # more efficient than calling left_envs/right_envs
        # for each site individually. Note that `ls` and `rs`
        # are also used further below, so they are computed
        # unconditionally here.
        right_sites_mod = list({n % N for n in right_sites})
        rs = self.right_envs([site1] + right_sites_mod)
        left_sites_mod = list({n % N for n in left_sites})
        ls = self.left_envs(left_sites_mod + [site1])

        c = []
        if left_sites:
            A = Node(self.nodes[site1], backend=self.backend.name)
            O1 = Node(op1, backend=self.backend.name)
            conj_A = conj(A)
            R = rs[site1]
            R[0] ^ A[2]
            R[1] ^ conj_A[2]
            A[1] ^ O1[1]
            conj_A[1] ^ O1[0]
            R = ((R @ A) @ O1) @ conj_A
            n1 = np.min(left_sites)
            #          -- A--------
            #             |        |
            # compute   op1(site1) |
            #             |        |
            #          -- A*-------
            # and evolve it to the left by contracting tensors at site2 < site1
            # if site2 is in `sites2`, calculate the observable
            #
            #  ---A--........-- A--------
            # |   |             |        |
            # |  op2(site2)    op1(site1)|
            # |   |             |        |
            #  ---A--........-- A*-------

            for n in range(site1 - 1, n1 - 1, -1):
                if n in left_sites:
                    A = Node(self.nodes[n % N], backend=self.backend.name)
                    conj_A = conj(A)
                    O2 = Node(op2, backend=self.backend.name)
                    L = ls[n % N]
                    L[0] ^ A[0]
                    L[1] ^ conj_A[0]
                    O2[0] ^ conj_A[1]
                    O2[1] ^ A[1]
                    R[0] ^ A[2]
                    R[1] ^ conj_A[2]

                    res = (((L @ A) @ O2) @ conj_A) @ R
                    c.append(res.tensor)
                if n > n1:
                    R = self.apply_transfer_operator(n % N, 'right', R)

            c = list(reversed(c))

        # compute <op2(site1)op1(site1)>
        if site1 in sites2:
            O1 = Node(op1, backend=self.backend.name)
            O2 = Node(op2, backend=self.backend.name)
            L = ls[site1]
            R = rs[site1]
            A = Node(self.nodes[site1], backend=self.backend.name)
            conj_A = conj(A)

            O1[1] ^ O2[0]
            L[0] ^ A[0]
            L[1] ^ conj_A[0]
            R[0] ^ A[2]
            R[1] ^ conj_A[2]
            A[1] ^ O2[1]
            conj_A[1] ^ O1[0]
            O = O1 @ O2
            res = (((L @ A) @ O) @ conj_A) @ R
            c.append(res.tensor)

        # compute <op1(site1) op2(site2)> for site1 < site2
        right_sites = sorted(sites2[sites2 > site1])
        if right_sites:
            A = Node(self.nodes[site1], backend=self.backend.name)
            conj_A = conj(A)
            L = ls[site1]
            O1 = Node(op1, backend=self.backend.name)
            L[0] ^ A[0]
            L[1] ^ conj_A[0]
            A[1] ^ O1[1]
            conj_A[1] ^ O1[0]
            L = L @ A @ O1 @ conj_A
            n2 = np.max(right_sites)
            #          -- A--
            #         |   |
            # compute | op1(site1)
            #         |   |
            #          -- A*--
            # and evolve it to the right by contracting tensors at site2 > site1
            # if site2 is in `sites2`, calculate the observable
            #
            #  ---A--........-- A--------
            # |   |             |        |
            # |  op1(site1)    op2(site2)|
            # |   |             |        |
            #  ---A--........-- A*-------

            for n in range(site1 + 1, n2 + 1):
                if n in right_sites:
                    R = rs[n % N]
                    A = Node(self.nodes[n % N], backend=self.backend.name)
                    conj_A = conj(A)
                    O2 = Node(op2, backend=self.backend.name)
                    A[0] ^ L[0]
                    conj_A[0] ^ L[1]
                    O2[0] ^ conj_A[1]
                    O2[1] ^ A[1]
                    R[0] ^ A[2]
                    R[1] ^ conj_A[2]
                    res = L @ A @ O2 @ conj_A @ R
                    c.append(res.tensor)

                if n < n2:
                    L = self.apply_transfer_operator(n % N, 'left', L)
        return np.array(c)
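A brief usage sketch; `FiniteMPS.random` and the Pauli-Z operator below are illustrative assumptions:

import numpy as np
import tensornetwork as tn

mps = tn.FiniteMPS.random(d=[2] * 6, D=[4] * 5, dtype=np.float64)
sz = np.diag([1.0, -1.0])

# <sz(2) sz(s)> for every s in sites2; if 2 were included in sites2,
# op2 would be applied first at that site.
corr = mps.measure_two_body_correlator(sz, sz, site1=2, sites2=[0, 1, 3, 4, 5])
print(np.asarray(corr).shape)  # (5,)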
Example #11
    def canonicalize(self,
                     left_initial_state: Optional[Union[BaseNode,
                                                        Tensor]] = None,
                     right_initial_state: Optional[Union[BaseNode,
                                                         Tensor]] = None,
                     precision: Optional[float] = 1E-10,
                     truncation_threshold: Optional[float] = 1E-15,
                     D: Optional[int] = None,
                     num_krylov_vecs: Optional[int] = 50,
                     maxiter: Optional[int] = 1000,
                     pseudo_inverse_cutoff: Optional[float] = None):
        """Canonicalize an InfiniteMPS (i.e. bring it into Schmidt-canonical form).

    Args:
      left_initial_state: An initial guess for the left eigenvector of
        the unit-cell mps transfer matrix
      right_initial_state: An initial guess for the right eigenvector of
        the unit-cell transfer matrix
      precision: The desired precision of the dominant eigenvalues (passed
        to InfiniteMPS.transfer_matrix_eigs)
      truncation_threshold: Truncation threshold for Schmidt-values at the
        boundaries of the mps.
      D: The maximum number of Schmidt values to be kept at the boundaries
        of the mps.
      num_krylov_vecs: Number of Krylov vectors to diagonalize transfer_matrix
      maxiter: Maximum number of iterations in `eigs`
      pseudo_inverse_cutoff: A cutoff for taking the Moore-Penrose pseudo-inverse
        of a matrix. Given the SVD of a matrix :math:`M=U S V`, the inverse
        is computed as :math:`V^* S^{-1}_+ U^*`, where :math:`S^{-1}_+` equals
        `S^{-1}` for all values in `S` which are larger than `pseudo_inverse_cutoff`,
        and is 0 for all others.
    Returns:
      The norm of the Schmidt values at the unit-cell boundary (`lam_norm`).
    """
        # bring center_position to 0
        self.position(0)
        # dtype of eta is the same as InfiniteMPS.dtype
        # this is assured in the backend.
        eta, l = self.transfer_matrix_eigs(direction='left',
                                           initial_state=left_initial_state,
                                           precision=precision,
                                           num_krylov_vecs=num_krylov_vecs,
                                           maxiter=maxiter)
        sqrteta = self.backend.sqrt(eta)
        self.nodes[0].tensor /= sqrteta

        # TODO: would be nice to do the algebra directly on the nodes here
        l.tensor /= self.backend.trace(l.tensor)
        l.tensor = (l.tensor +
                    self.backend.transpose(self.backend.conj(l.tensor),
                                           (1, 0))) / 2.0

        # eigvals_left and u_left are both `Tensor` objects
        eigvals_left, u_left = self.backend.eigh(l.tensor)
        eigvals_left /= self.backend.norm(eigvals_left)
        if pseudo_inverse_cutoff:
            mask = eigvals_left <= pseudo_inverse_cutoff

        inveigvals_left = 1.0 / eigvals_left
        if pseudo_inverse_cutoff:
            inveigvals_left = self.backend.index_update(
                inveigvals_left, mask, 0.0)

        sqrtl = Node(
            ncon([u_left,
                  self.backend.diag(self.backend.sqrt(eigvals_left))],
                 [[-2, 1], [1, -1]],
                 backend=self.backend.name),
            backend=self.backend)
        inv_sqrtl = Node(ncon([
            self.backend.diag(self.backend.sqrt(inveigvals_left)),
            self.backend.conj(u_left)
        ], [[-2, 1], [-1, 1]],
                              backend=self.backend.name),
                         backend=self.backend)

        eta, r = self.transfer_matrix_eigs(direction='right',
                                           initial_state=right_initial_state,
                                           precision=precision,
                                           num_krylov_vecs=num_krylov_vecs,
                                           maxiter=maxiter)

        r.tensor /= self.backend.trace(r.tensor)
        r.tensor = (r.tensor +
                    self.backend.transpose(self.backend.conj(r.tensor),
                                           (1, 0))) / 2.0
        # eigvals_right and u_right are both `Tensor` objects
        eigvals_right, u_right = self.backend.eigh(r.tensor)
        eigvals_right /= self.backend.norm(eigvals_right)
        if pseudo_inverse_cutoff:
            mask = eigvals_right <= pseudo_inverse_cutoff

        inveigvals_right = 1.0 / eigvals_right
        if pseudo_inverse_cutoff:
            inveigvals_right = self.backend.index_update(
                inveigvals_right, mask, 0.0)

        sqrtr = Node(ncon(
            [u_right,
             self.backend.diag(self.backend.sqrt(eigvals_right))],
            [[-1, 1], [1, -2]],
            backend=self.backend.name),
                     backend=self.backend)

        inv_sqrtr = Node(ncon([
            self.backend.diag(self.backend.sqrt(inveigvals_right)),
            self.backend.conj(u_right)
        ], [[-1, 1], [-2, 1]],
                              backend=self.backend.name),
                         backend=self.backend)

        tmp = Node(ncon([sqrtl, sqrtr], [[-1, 1], [1, -2]],
                        backend=self.backend.name),
                   backend=self.backend)
        U, lam, V, _ = split_node_full_svd(
            tmp, [tmp[0]], [tmp[1]],
            max_singular_values=D,
            max_truncation_err=truncation_threshold)
        # absorb lam * V * inv_sqrtr into the left-most mps tensor
        self.nodes[0] = ncon([lam, V, inv_sqrtr, self.nodes[0]],
                             [[-1, 1], [1, 2], [2, 3], [3, -2, -3]])

        # absorb connector * inv_sqrtl * U * lam into the right-most tensor
        # Note that lam is absorbed here, which means that the state
        # is in the parallel decomposition
        # Note that we absorb connector_matrix here
        self.nodes[-1] = ncon(
            [self.get_node(len(self) - 1), inv_sqrtl, U, lam],
            [[-1, -2, 1], [1, 2], [2, 3], [3, -3]])
        # now do a sweep of QR decompositions to bring the mps tensors into
        # left canonical form (except the last one)
        self.position(len(self) - 1)
        # TODO: lam is a diagonal matrix, but we're not making
        # use of it at the moment
        lam_norm = self.backend.norm(lam.tensor)
        lam.tensor /= lam_norm
        self.center_position = len(self) - 1
        self.connector_matrix = Node(self.backend.inv(lam.tensor),
                                     backend=self.backend)

        return lam_norm
def split_node(
    node: BaseNode,
    left_edges: List[Edge],
    right_edges: List[Edge],
    max_singular_values: Optional[int] = None,
    max_truncation_err: Optional[float] = None,
    left_name: Optional[Text] = None,
    right_name: Optional[Text] = None,
    edge_name: Optional[Text] = None,
) -> Tuple[BaseNode, BaseNode, Tensor]:
    """Split a `Node` using Singular Value Decomposition.

  Let M be the matrix created by flattening left_edges and right_edges into
  2 axes. Let :math:`U S V^* = M` be the Singular Value Decomposition of 
  :math:`M`. This will split the network into 2 nodes. The left node's 
  tensor will be :math:`U \\sqrt{S}` and the right node's tensor will be 
  :math:`\\sqrt{S} V^*` where :math:`V^*` is
  the adjoint of :math:`V`.

  The singular value decomposition is truncated if `max_singular_values` or
  `max_truncation_err` is not `None`.

  The truncation error is the 2-norm of the vector of truncated singular
  values. If only `max_truncation_err` is set, as many singular values will
  be truncated as possible while maintaining:
  `norm(truncated_singular_values) <= max_truncation_err`.

  If only `max_singular_values` is set, the number of singular values kept
  will be `min(max_singular_values, number_of_singular_values)`, so that
  `max(0, number_of_singular_values - max_singular_values)` are truncated.

  If both `max_truncation_err` and `max_singular_values` are set,
  `max_singular_values` takes priority: The truncation error may be larger
  than `max_truncation_err` if required to satisfy `max_singular_values`.

  Args:
    node: The node you want to split.
    left_edges: The edges you want connected to the new left node.
    right_edges: The edges you want connected to the new right node.
    max_singular_values: The maximum number of singular values to keep.
    max_truncation_err: The maximum allowed truncation error.
    left_name: The name of the new left node. If `None`, a name will be 
      generated automatically.
    right_name: The name of the new right node. If `None`, a name will be 
      generated automatically.
    edge_name: The name of the new `Edge` connecting the new left and 
      right node. If `None`, a name will be generated automatically. 
      The new axis will get the same name as the edge.

  Returns:
    A tuple containing:
      left_node: 
        A new node created that connects to all of the `left_edges`.
        Its underlying tensor is :math:`U \\sqrt{S}`
      right_node: 
        A new node created that connects to all of the `right_edges`.
        Its underlying tensor is :math:`\\sqrt{S} V^*`
      truncated_singular_values: 
        The vector of truncated singular values.
  """

    if not hasattr(node, 'backend'):
        raise TypeError('Node {} of type {} has no `backend`'.format(
            node, type(node)))

    if node.axis_names and edge_name:
        left_axis_names = []
        right_axis_names = [edge_name]
        for edge in left_edges:
            left_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is
                                   node else node.axis_names[edge.axis2])
        for edge in right_edges:
            right_axis_names.append(node.axis_names[edge.axis1] if edge.node1
                                    is node else node.axis_names[edge.axis2])
        left_axis_names.append(edge_name)
    else:
        left_axis_names = None
        right_axis_names = None

    backend = node.backend
    node.reorder_edges(left_edges + right_edges)

    u, s, vh, trun_vals = backend.svd_decomposition(node.tensor,
                                                    len(left_edges),
                                                    max_singular_values,
                                                    max_truncation_err)
    sqrt_s = backend.sqrt(s)
    u_s = u * sqrt_s
    # We have to do this since we are doing element-wise multiplication against
    # the first axis of vh. If we don't, it's possible one of the other axes of
    # vh will be the same size as sqrt_s and would multiply across that axis
    # instead, which is bad.
    sqrt_s_broadcast_shape = backend.concat(
        [backend.shape(sqrt_s), [1] * (len(vh.shape) - 1)], axis=-1)
    vh_s = vh * backend.reshape(sqrt_s, sqrt_s_broadcast_shape)
    left_node = Node(u_s,
                     name=left_name,
                     axis_names=left_axis_names,
                     backend=backend.name)
    for i, edge in enumerate(left_edges):
        left_node.add_edge(edge, i)
        edge.update_axis(i, node, i, left_node)
    right_node = Node(vh_s,
                      name=right_name,
                      axis_names=right_axis_names,
                      backend=backend.name)
    for i, edge in enumerate(right_edges):
        # i + 1 to account for the new edge.
        right_node.add_edge(edge, i + 1)
        edge.update_axis(i + len(left_edges), node, i + 1, right_node)
    connect(left_node.edges[-1], right_node.edges[0], name=edge_name)
    node.fresh_edges(node.axis_names)
    return left_node, right_node, trun_vals
def test_node_axis_names_setter_throws_value_error():
  n1 = Node(np.eye(2), axis_names=['a', 'b'])
  with pytest.raises(TypeError):
    n1.axis_names = [0, 1]
def split_node_rq(
    node: BaseNode,
    left_edges: List[Edge],
    right_edges: List[Edge],
    left_name: Optional[Text] = None,
    right_name: Optional[Text] = None,
    edge_name: Optional[Text] = None,
) -> Tuple[BaseNode, BaseNode]:
    """Split a `Node` using RQ (reversed QR) decomposition

  Let M be the matrix created by flattening left_edges and right_edges into
  2 axes. Let :math:`QR = M^*` be the QR Decomposition of
  :math:`M^*`. This will split the network into 2 nodes. The left node's
  tensor will be :math:`R^*` (a lower triangular matrix) and the right 
    node's tensor will be :math:`Q^*` (an orthonormal matrix)

  Args:
    node: The node you want to split.
    left_edges: The edges you want connected to the new left node.
    right_edges: The edges you want connected to the new right node.
    left_name: The name of the new left node. If `None`, a name will be 
      generated automatically.
    right_name: The name of the new right node. If `None`, a name will be 
      generated automatically.
    edge_name: The name of the new `Edge` connecting the new left and 
      right node. If `None`, a name will be generated automatically.

  Returns:
    A tuple containing:
      left_node:
        A new node created that connects to all of the `left_edges`.
        Its underlying tensor is :math:`R^*`
      right_node:
        A new node created that connects to all of the `right_edges`.
        Its underlying tensor is :math:`Q^*`
  """
    if not hasattr(node, 'backend'):
        raise TypeError('Node {} of type {} has no `backend`'.format(
            node, type(node)))

    if node.axis_names and edge_name:
        left_axis_names = []
        right_axis_names = [edge_name]
        for edge in left_edges:
            left_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is
                                   node else node.axis_names[edge.axis2])
        for edge in right_edges:
            right_axis_names.append(node.axis_names[edge.axis1] if edge.node1
                                    is node else node.axis_names[edge.axis2])
        left_axis_names.append(edge_name)
    else:
        left_axis_names = None
        right_axis_names = None
    backend = node.backend
    node.reorder_edges(left_edges + right_edges)
    r, q = backend.rq_decomposition(node.tensor, len(left_edges))
    left_node = Node(r,
                     name=left_name,
                     axis_names=left_axis_names,
                     backend=backend.name)
    for i, edge in enumerate(left_edges):
        left_node.add_edge(edge, i)
        edge.update_axis(i, node, i, left_node)
    right_node = Node(q,
                      name=right_name,
                      axis_names=right_axis_names,
                      backend=backend.name)
    for i, edge in enumerate(right_edges):
        # i + 1 to account for the new edge.
        right_node.add_edge(edge, i + 1)
        edge.update_axis(i + len(left_edges), node, i + 1, right_node)
    connect(left_node.edges[-1], right_node.edges[0], name=edge_name)
    return left_node, right_node
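A short usage sketch, assuming the function is exported as `tn.split_node_rq`:

import numpy as np
import tensornetwork as tn

node = tn.Node(np.random.rand(4, 5, 6))
r_node, q_node = tn.split_node_rq(
    node, left_edges=[node[0]], right_edges=[node[1], node[2]])
# R keeps the left edge plus the new bond; Q keeps the bond and the right edges.
print(r_node.tensor.shape, q_node.tensor.shape)  # (4, 4) (4, 5, 6)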
Example #15
    def measure_two_body_correlator(self, op1: Tensor, op2: Tensor, site1: int,
                                    sites2: Sequence[int]) -> List:
        """
    Compute the correlator
    :math:`\\langle` `op1[site1], op2[s]`:math:`\\rangle`
    between `site1` and all sites `s` in `sites2`. If `s == site1`,
    `op2[s]` will be applied first.

    Args:
      op1: Tensor of rank 2; the local operator at `site1`.
      op2: Tensor of rank 2; the local operator at `sites2`.
      site1: The site where `op1`  acts
      sites2: Sites where operator `op2` acts.
    Returns:
      List: Correlator :math:`\\langle` `op1[site1], op2[s]`:math:`\\rangle`
        for `s` :math:`\\in` `sites2`.
    Raises:
      ValueError if `site1` is out of range
    """
        N = len(self)
        if site1 < 0:
            raise ValueError(
                "Site site1 out of range: {} not between 0 <= site < N = {}.".
                format(site1, N))
        sites2 = np.array(sites2)  #enable logical indexing

        # we break the computation into two parts:
        # first we get all correlators <op2(site2) op1(site1)> with site2 < site1
        # then all correlators <op1(site1) op2(site2)> with site2 >= site1

        # get all sites smaller than site1
        left_sites = np.sort(sites2[sites2 < site1])
        # get all sites larger than site1
        right_sites = np.sort(sites2[sites2 > site1])

        # compute all necessary right reduced
        # density matrices in one go. This is
        # more efficient than calling right_envs
        # for each site individually
        rs = self.right_envs(
            np.append(site1, np.mod(right_sites, N)).astype(np.int64))
        ls = self.left_envs(
            np.append(np.mod(left_sites, N), site1).astype(np.int64))

        c = []
        if len(left_sites) > 0:

            A = Node(self.tensors[site1], backend=self.backend)
            O1 = Node(op1, backend=self.backend)
            conj_A = conj(A)
            R = Node(rs[site1], backend=self.backend)
            R[0] ^ A[2]
            R[1] ^ conj_A[2]
            A[1] ^ O1[1]
            conj_A[1] ^ O1[0]
            R = ((R @ A) @ O1) @ conj_A
            n1 = np.min(left_sites)
            #          -- A--------
            #             |        |
            # compute   op1(site1) |
            #             |        |
            #          -- A*-------
            # and evolve it to the left by contracting tensors at site2 < site1
            # if site2 is in `sites2`, calculate the observable
            #
            #  ---A--........-- A--------
            # |   |             |        |
            # |  op2(site2)    op1(site1)|
            # |   |             |        |
            #  ---A--........-- A*-------

            for n in range(site1 - 1, n1 - 1, -1):
                if n in left_sites:
                    A = Node(self.tensors[n % N], backend=self.backend)
                    conj_A = conj(A)
                    O2 = Node(op2, backend=self.backend)
                    L = Node(ls[n % N], backend=self.backend)
                    L[0] ^ A[0]
                    L[1] ^ conj_A[0]
                    O2[0] ^ conj_A[1]
                    O2[1] ^ A[1]
                    R[0] ^ A[2]
                    R[1] ^ conj_A[2]

                    res = (((L @ A) @ O2) @ conj_A) @ R
                    c.append(res.tensor)
                if n > n1:
                    R = Node(self.apply_transfer_operator(
                        n % N, 'right', R.tensor),
                             backend=self.backend)

            c = list(reversed(c))

        # compute <op2(site1)op1(site1)>
        if site1 in sites2:
            O1 = Node(op1, backend=self.backend)
            O2 = Node(op2, backend=self.backend)
            L = Node(ls[site1], backend=self.backend)
            R = Node(rs[site1], backend=self.backend)
            A = Node(self.tensors[site1], backend=self.backend)
            conj_A = conj(A)

            O1[1] ^ O2[0]
            L[0] ^ A[0]
            L[1] ^ conj_A[0]
            R[0] ^ A[2]
            R[1] ^ conj_A[2]
            A[1] ^ O2[1]
            conj_A[1] ^ O1[0]
            O = O1 @ O2
            res = (((L @ A) @ O) @ conj_A) @ R
            c.append(res.tensor)

        # compute <op1(site1) op2(site2)> for site1 < site2
        if len(right_sites) > 0:
            A = Node(self.tensors[site1], backend=self.backend)
            conj_A = conj(A)
            L = Node(ls[site1], backend=self.backend)
            O1 = Node(op1, backend=self.backend)
            L[0] ^ A[0]
            L[1] ^ conj_A[0]
            A[1] ^ O1[1]
            conj_A[1] ^ O1[0]
            L = L @ A @ O1 @ conj_A
            n2 = np.max(right_sites)
            #          -- A--
            #         |   |
            # compute | op1(site1)
            #         |   |
            #          -- A*--
            # and evolve it to the right by contracting tensors at site2 > site1
            # if site2 is in `sites2`, calculate the observable
            #
            #  ---A--........-- A--------
            # |   |             |        |
            # |  op1(site1)    op2(site2)|
            # |   |             |        |
            #  ---A--........-- A*-------
            for n in range(site1 + 1, n2 + 1):
                if n in right_sites:
                    R = Node(rs[n % N], backend=self.backend)
                    A = Node(self.tensors[n % N], backend=self.backend)
                    conj_A = conj(A)
                    O2 = Node(op2, backend=self.backend)
                    A[0] ^ L[0]
                    conj_A[0] ^ L[1]
                    O2[0] ^ conj_A[1]
                    O2[1] ^ A[1]
                    R[0] ^ A[2]
                    R[1] ^ conj_A[2]
                    res = L @ A @ O2 @ conj_A @ R
                    c.append(res.tensor)

                if n < n2:
                    L = Node(self.apply_transfer_operator(
                        n % N, 'left', L.tensor),
                             backend=self.backend)
        return [self.backend.item(o) for o in c]
def test_node_sanity_check(backend):
    t1, t2 = np.ones((2, 2)), np.ones((2, 2))
    n1, n2 = Node(t1, backend=backend), Node(t2, backend=backend)
    result_2 = ncon_interface.ncon([n1, n2], [(-1, 1), (1, -2)],
                                   backend=backend)
    np.testing.assert_allclose(result_2.tensor, np.ones((2, 2)) * 2)
def test_node_trace(backend):
    a = Node(np.ones((2, 2)), backend=backend)
    res = ncon_interface.ncon([a], [(1, 1)], backend=backend)
    np.testing.assert_allclose(res.tensor, 2)
def test_node_add_axis_names_int_throws_error():
  n1 = Node(np.eye(2), axis_names=['a', 'b'])
  with pytest.raises(TypeError):
    n1.add_axis_names([0, 1])  # pytype: disable=wrong-arg-types
def test_node_name_setter_raises_type_error(backend, name):
  n1 = Node(np.random.rand(2), backend=backend)
  with pytest.raises(TypeError):
    n1.name = name
def test_node_dtype(backend):
  n1 = Node(np.random.rand(2), backend=backend)
  assert n1.dtype == n1.tensor.dtype
Example #21
    def left_envs(self, sites: Sequence[int]) -> Dict:
        """Compute left reduced density matrices for site `sites`. This returns a
    dict `left_envs` mapping sites (int) to Tensors. `left_envs[site]` is the
    left-reduced density matrix to the left of site `site`.

    Args:
      sites (list of int): A list of sites of the MPS.
    Returns:
      `dict` mapping `int` to `Tensor`: The left-reduced density matrices
        at each  site in `sites`.
    """
        sites = np.array(sites)  #enable logical indexing
        if len(sites) == 0:
            return {}

        n2 = np.max(sites)

        #check if all elements of `sites` are within allowed range
        if not np.all(sites <= len(self)):
            raise ValueError(
                'all elements of `sites` have to be <= N = {}'.format(
                    len(self)))
        if not np.all(sites >= 0):
            raise ValueError('all elements of `sites` have to be non-negative')

        # left-reduced density matrices to the left of `center_position`
        # (including center_position) are all identities
        left_sites = sites[sites <= self.center_position]
        left_envs = {}
        for site in left_sites:
            left_envs[site] = Node(self.backend.eye(
                N=self.nodes[site].shape[0], dtype=self.dtype),
                                   backend=self.backend)

        # left reduced density matrices at sites > center_position
        # have to be calculated from a network contraction
        if n2 > self.center_position:
            nodes = {}
            conj_nodes = {}
            for site in range(self.center_position, n2):
                nodes[site] = Node(self.nodes[site], backend=self.backend)
                conj_nodes[site] = conj(self.nodes[site])

            nodes[self.center_position][0] ^ conj_nodes[
                self.center_position][0]
            nodes[self.center_position][1] ^ conj_nodes[
                self.center_position][1]

            for site in range(self.center_position + 1, n2):
                nodes[site][0] ^ nodes[site - 1][2]
                conj_nodes[site][0] ^ conj_nodes[site - 1][2]
                nodes[site][1] ^ conj_nodes[site][1]

            edges = {site: node[2] for site, node in nodes.items()}
            conj_edges = {site: node[2] for site, node in conj_nodes.items()}

            left_env = contract_between(nodes[self.center_position],
                                        conj_nodes[self.center_position])
            left_env.reorder_edges([
                edges[self.center_position], conj_edges[self.center_position]
            ])
            if self.center_position + 1 in sites:
                left_envs[self.center_position + 1] = left_env
            for site in range(self.center_position + 1, n2):
                left_env = contract_between(left_env, nodes[site])
                left_env = contract_between(left_env, conj_nodes[site])
                if site + 1 in sites:
                    left_env.reorder_edges([edges[site], conj_edges[site]])
                    left_envs[site + 1] = left_env
        return left_envs
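A brief sketch of querying environments on a small MPS (constructor and canonical-form handling as assumed above):

import numpy as np
import tensornetwork as tn

mps = tn.FiniteMPS.random(d=[2] * 5, D=[4] * 4, dtype=np.float64)
mps.position(2)  # move the orthogonality center to site 2

# Environments at or left of the center are identities; the one for site 3
# is obtained by contracting the network up to site 2.
envs = mps.left_envs(sites=[1, 2, 3])
print(sorted(envs.keys()))  # [1, 2, 3]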
def test_node_output_order(backend):
    t = np.random.randn(2, 2)
    a = Node(t, backend=backend)
    res = ncon_interface.ncon([a], [(-2, -1)], backend=backend)
    np.testing.assert_allclose(res.tensor, t.transpose())
Example #23
    def right_envs(self, sites: Sequence[int]) -> Dict:
        """Compute right reduced density matrices for site `sites. This returns a
    dict `right_envs` mapping sites (int) to Tensors. `right_envs[site]` is the
    right-reduced density matrix to the right of site `site`.

    Args:
      sites (list of int): A list of sites of the MPS.
    Returns:
      `dict` mapping `int` to `Tensor`: The right-reduced density matrices
        at each  site in `sites`.
    """
        sites = np.array(sites)
        if len(sites) == 0:
            return {}

        n1 = np.min(sites)
        #check if all elements of `sites` are within allowed range
        if not np.all(sites < len(self)):
            raise ValueError(
                'all elements of `sites` have to be < N = {}'.format(
                    len(self)))
        if not np.all(sites >= -1):
            raise ValueError('all elements of `sites` have to be >= -1')

        # right-reduced density matrices to the right of `center_position`
        # (including center_position) are all identities
        right_sites = sites[sites >= self.center_position]
        right_envs = {}
        for site in right_sites:
            right_envs[site] = Node(self.backend.eye(
                N=self.nodes[site].shape[2], dtype=self.dtype),
                                    backend=self.backend)

        # right reduced density matrices at sites < center_position
        # have to be calculated from a network contraction
        if n1 < self.center_position:
            nodes = {}
            conj_nodes = {}
            for site in reversed(range(n1 + 1, self.center_position + 1)):
                nodes[site] = Node(self.nodes[site], backend=self.backend)
                conj_nodes[site] = conj(self.nodes[site])

            nodes[self.center_position][2] ^ conj_nodes[
                self.center_position][2]
            nodes[self.center_position][1] ^ conj_nodes[
                self.center_position][1]

            for site in reversed(range(n1 + 1, self.center_position)):
                nodes[site][2] ^ nodes[site + 1][0]
                conj_nodes[site][2] ^ conj_nodes[site + 1][0]
                nodes[site][1] ^ conj_nodes[site][1]

            edges = {site: node[0] for site, node in nodes.items()}
            conj_edges = {site: node[0] for site, node in conj_nodes.items()}

            right_env = contract_between(nodes[self.center_position],
                                         conj_nodes[self.center_position])
            if self.center_position - 1 in sites:
                right_env.reorder_edges([
                    edges[self.center_position],
                    conj_edges[self.center_position]
                ])
                right_envs[self.center_position - 1] = right_env
            for site in reversed(range(n1 + 1, self.center_position)):
                right_env = contract_between(right_env, nodes[site])
                right_env = contract_between(right_env, conj_nodes[site])
                if site - 1 in sites:
                    right_env.reorder_edges([edges[site], conj_edges[site]])
                    right_envs[site - 1] = right_env

        return right_envs
Example #24
    def apply_two_site_gate(
            self,
            gate: Tensor,
            site1: int,
            site2: int,
            max_singular_values: Optional[int] = None,
            max_truncation_err: Optional[float] = None) -> Tensor:
        """
    Apply a two-site gate to an MPS. This routine will in general 
    destroy any canonical form of the state. If a canonical form is needed, 
    the user can restore it using MPS.position
    Args:
      gate (Tensor): a two-body gate
      site1, site2 (int, int): the sites where the gate should be applied
      max_singular_values (int): The maximum number of singular values to keep.
      max_truncation_err (float): The maximum allowed truncation error.
    Returns:
      `Tensor`: A scalar tensor containing the truncated weight of the
        truncation.
    """
        if len(gate.shape) != 4:
            raise ValueError('rank of gate is {} but has to be 4'.format(
                len(gate.shape)))
        if site1 < 0 or site1 >= len(self) - 1:
            raise ValueError(
                'site1 = {} is not between 0 <= site < N - 1 = {}'.format(
                    site1, len(self)))
        if site2 < 1 or site2 >= len(self):
            raise ValueError(
                'site2 = {} is not between 1 <= site < N = {}'.format(
                    site2, len(self)))
        if site2 <= site1:
            raise ValueError(
                'site2 = {} has to be larger than site1 = {}'.format(
                    site2, site1))
        if site2 != site1 + 1:
            raise ValueError(
                'site2 = {} != site1 + 1 = {}. Only nearest neighbor gates are '
                'currently supported'.format(site2, site1 + 1))

        if (max_singular_values
                or max_truncation_err) and self.center_position not in (site1,
                                                                        site2):
            raise ValueError(
                'center_position = {}, but gate is applied at sites {}, {}. '
                'Truncation should only be done if the gate '
                'is applied at the center position of the MPS'.format(
                    self.center_position, site1, site2))

        gate_node = Node(gate, backend=self.backend.name)
        gate_node[2] ^ self.nodes[site1][1]
        gate_node[3] ^ self.nodes[site2][1]
        left_edges = [self.nodes[site1][0], gate_node[0]]
        right_edges = [gate_node[1], self.nodes[site2][2]]
        result = self.nodes[site1] @ self.nodes[site2] @ gate_node
        U, S, V, tw = split_node_full_svd(
            result,
            left_edges=left_edges,
            right_edges=right_edges,
            max_singular_values=max_singular_values,
            max_truncation_err=max_truncation_err,
            left_name=self.nodes[site1].name,
            right_name=self.nodes[site2].name)
        V.reorder_edges([S[1]] + right_edges)
        left_edges = left_edges + [S[1]]
        self.nodes[site1] = contract_between(
            U, S, name=U.name).reorder_edges(left_edges)
        self.nodes[site2] = V
        return tw
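A minimal TEBD-style usage sketch; the SWAP gate below is just an illustrative nearest-neighbor gate, and `FiniteMPS.random` is assumed as before:

import numpy as np
import tensornetwork as tn

mps = tn.FiniteMPS.random(d=[2] * 4, D=[4] * 3, dtype=np.float64)

# Rank-4 SWAP gate: axes (0, 1) are outputs, axes (2, 3) are inputs.
swap = np.eye(4).reshape(2, 2, 2, 2).transpose(0, 1, 3, 2)

# Truncation requires the orthogonality center to sit on one of the two sites.
mps.position(1)
truncated_weight = mps.apply_two_site_gate(
    swap, site1=1, site2=2, max_singular_values=4)
print(truncated_weight)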
def test_conj(backend):
    if backend == "pytorch":
        pytest.skip("Complex numbers currently not supported in PyTorch")
    a = Node(np.random.rand(3, 3) + 1j * np.random.rand(3, 3), backend=backend)
    abar = node_linalg.conj(a)
    np.testing.assert_allclose(abar.tensor, a.backend.conj(a.tensor))
def test_node_axis_names_setter_throws_shape_small_mismatch_error():
  n1 = Node(np.eye(2), axis_names=['a', 'b'])
  with pytest.raises(ValueError):
    n1.axis_names = ['a']
def split_node_full_svd(
    node: BaseNode,
    left_edges: List[Edge],
    right_edges: List[Edge],
    max_singular_values: Optional[int] = None,
    max_truncation_err: Optional[float] = None,
    left_name: Optional[Text] = None,
    middle_name: Optional[Text] = None,
    right_name: Optional[Text] = None,
    left_edge_name: Optional[Text] = None,
    right_edge_name: Optional[Text] = None,
) -> Tuple[BaseNode, BaseNode, BaseNode, Tensor]:
    """Split a node by doing a full singular value decomposition.

  Let M be the matrix created by flattening left_edges and right_edges into
  2 axes. Let :math:`U S V^* = M` be the Singular Value Decomposition of
  :math:`M`.

  The leftmost node will be the :math:`U` tensor of the SVD, the middle node is
  the diagonal matrix of the singular values, ordered largest to smallest,
  and the rightmost node will be the :math:`V^*` tensor of the SVD.

  The singular value decomposition is truncated if `max_singular_values` or
  `max_truncation_err` is not `None`.

  The truncation error is the 2-norm of the vector of truncated singular
  values. If only `max_truncation_err` is set, as many singular values will
  be truncated as possible while maintaining:
  `norm(truncated_singular_values) <= max_truncation_err`.

  If only `max_singular_values` is set, the number of singular values kept
  will be `min(max_singular_values, number_of_singular_values)`, so that
  `max(0, number_of_singular_values - max_singular_values)` are truncated.

  If both `max_truncation_err` and `max_singular_values` are set,
  `max_singular_values` takes priority: The truncation error may be larger
  than `max_truncation_err` if required to satisfy `max_singular_values`.

  Args:
    node: The node you want to split.
    left_edges: The edges you want connected to the new left node.
    right_edges: The edges you want connected to the new right node.
    max_singular_values: The maximum number of singular values to keep.
    max_truncation_err: The maximum allowed truncation error.
    left_name: The name of the new left node. If None, a name will be 
      generated automatically.
    middle_name: The name of the new center node. If None, a name will be 
      generated automatically.
    right_name: The name of the new right node. If None, a name will be 
      generated automatically.
    left_edge_name: The name of the new left `Edge` connecting
      the new left node (`U`) and the new central node (`S`).
      If `None`, a name will be generated automatically.
    right_edge_name: The name of the new right `Edge` connecting
      the new central node (`S`) and the new right node (`V*`).
      If `None`, a name will be generated automatically.

  Returns:
    A tuple containing:
      left_node:
        A new node created that connects to all of the `left_edges`.
        Its underlying tensor is :math:`U`
      singular_values_node:
        A new node that has 2 edges connecting `left_node` and `right_node`.
        Its underlying tensor is :math:`S`
      right_node:
        A new node created that connects to all of the `right_edges`.
        Its underlying tensor is :math:`V^*`
      truncated_singular_values:
        The vector of truncated singular values.
  """
    if not hasattr(node, 'backend'):
        raise TypeError('Node {} of type {} has no `backend`'.format(
            node, type(node)))

    if node.axis_names and left_edge_name and right_edge_name:
        left_axis_names = []
        right_axis_names = [right_edge_name]
        for edge in left_edges:
            left_axis_names.append(node.axis_names[edge.axis1] if edge.node1 is
                                   node else node.axis_names[edge.axis2])
        for edge in right_edges:
            right_axis_names.append(node.axis_names[edge.axis1] if edge.node1
                                    is node else node.axis_names[edge.axis2])
        left_axis_names.append(left_edge_name)
        center_axis_names = [left_edge_name, right_edge_name]
    else:
        left_axis_names = None
        center_axis_names = None
        right_axis_names = None

    backend = node.backend

    node.reorder_edges(left_edges + right_edges)
    u, s, vh, trun_vals = backend.svd_decomposition(node.tensor,
                                                    len(left_edges),
                                                    max_singular_values,
                                                    max_truncation_err)
    left_node = Node(u,
                     name=left_name,
                     axis_names=left_axis_names,
                     backend=backend.name)
    singular_values_node = Node(backend.diag(s),
                                name=middle_name,
                                axis_names=center_axis_names,
                                backend=backend.name)

    right_node = Node(vh,
                      name=right_name,
                      axis_names=right_axis_names,
                      backend=backend.name)

    for i, edge in enumerate(left_edges):
        left_node.add_edge(edge, i)
        edge.update_axis(i, node, i, left_node)
    for i, edge in enumerate(right_edges):
        # i + 1 to account for the new edge.
        right_node.add_edge(edge, i + 1)
        edge.update_axis(i + len(left_edges), node, i + 1, right_node)
    connect(left_node.edges[-1],
            singular_values_node.edges[0],
            name=left_edge_name)
    connect(singular_values_node.edges[1],
            right_node.edges[0],
            name=right_edge_name)
    return left_node, singular_values_node, right_node, trun_vals
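A short usage sketch, assuming the function is exported as `tn.split_node_full_svd`:

import numpy as np
import tensornetwork as tn

node = tn.Node(np.random.rand(4, 5, 6))
u, s, vh, trunc = tn.split_node_full_svd(
    node, left_edges=[node[0], node[1]], right_edges=[node[2]],
    max_singular_values=2)
# s is a separate diagonal node holding the kept singular values.
print(u.tensor.shape, s.tensor.shape, vh.tensor.shape)  # (4, 5, 2) (2, 2) (2, 6)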
def test_transpose(backend):
    a = Node(np.random.rand(1, 2, 3, 4, 5), backend=backend)
    order = [a[n] for n in reversed(range(5))]
    transpa = node_linalg.transpose(a, [4, 3, 2, 1, 0])
    a.reorder_edges(order)
    np.testing.assert_allclose(a.tensor, transpa.tensor)
Example #29
    def apply_two_site_gate(
            self,
            gate: Tensor,
            site1: int,
            site2: int,
            max_singular_values: Optional[int] = None,
            max_truncation_err: Optional[float] = None) -> Tensor:
        """Apply a two-site gate to an MPS. This routine will in general destroy
    any canonical form of the state. If a canonical form is needed, the user
    can restore it using `FiniteMPS.position`.

    Args:
      gate: A two-body gate.
      site1: The first site where the gate acts.
      site2: The second site where the gate acts.
      max_singular_values: The maximum number of singular values to keep.
      max_truncation_err: The maximum allowed truncation error.

    Returns:
      `Tensor`: A scalar tensor containing the truncated weight of the
        truncation.
    """
        if len(gate.shape) != 4:
            raise ValueError('rank of gate is {} but has to be 4'.format(
                len(gate.shape)))
        if site1 < 0 or site1 >= len(self) - 1:
            raise ValueError(
                'site1 = {} is not between 0 <= site < N - 1 = {}'.format(
                    site1, len(self)))
        if site2 < 1 or site2 >= len(self):
            raise ValueError(
                'site2 = {} is not between 1 <= site < N = {}'.format(
                    site2, len(self)))
        if site2 <= site1:
            raise ValueError(
                'site2 = {} has to be larger than site1 = {}'.format(
                    site2, site1))
        if site2 != site1 + 1:
            raise ValueError("Found site2 ={}, site1={}. Only nearest "
                             "neighbor gates are currently"
                             "supported".format(site2, site1))

        if (max_singular_values
                or max_truncation_err) and self.center_position not in (site1,
                                                                        site2):
            raise ValueError(
                'center_position = {}, but gate is applied at sites {}, {}. '
                'Truncation should only be done if the gate '
                'is applied at the center position of the MPS'.format(
                    self.center_position, site1, site2))

        gate_node = Node(gate, backend=self.backend)
        node1 = Node(self.tensors[site1], backend=self.backend)
        node2 = Node(self.tensors[site2], backend=self.backend)
        node1[2] ^ node2[0]
        gate_node[2] ^ node1[1]
        gate_node[3] ^ node2[1]
        left_edges = [node1[0], gate_node[0]]
        right_edges = [gate_node[1], node2[2]]
        result = node1 @ node2 @ gate_node
        U, S, V, tw = split_node_full_svd(
            result,
            left_edges=left_edges,
            right_edges=right_edges,
            max_singular_values=max_singular_values,
            max_truncation_err=max_truncation_err,
            left_name=node1.name,
            right_name=node2.name)
        V.reorder_edges([S[1]] + right_edges)
        left_edges = left_edges + [S[1]]
        res = contract_between(U, S, name=U.name).reorder_edges(left_edges)
        self.tensors[site1] = res.tensor
        self.tensors[site2] = V.tensor
        return tw
def test_node_out_of_order_contraction(backend):
    a = Node(np.ones((2, 2, 2)), backend=backend)
    with pytest.warns(UserWarning, match='Suboptimal ordering'):
        ncon_interface.ncon([a, a, a], [(-1, 1, 3), (1, 3, 2), (2, -2, -3)],
                            backend=backend)