Example #1
def test_eigh_vs_backend(backend, dtype):
    shape = (3, 6, 4, 4)
    dtype = testing_utils.np_dtype_to_backend(backend, dtype)
    tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype)
    tn_result = linalg.eigh(tensor)
    if backend is None:
        backend = backend_contextmanager.get_default_backend()
    backend_obj = backends.backend_factory.get_backend(backend)
    backend_result = backend_obj.eigh(tensor.array)
    tn_arrays = [t.array for t in tn_result]
    for tn_arr, backend_arr in zip(tn_arrays, backend_result):
        np.testing.assert_allclose(tn_arr, backend_arr)
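The `backend` and `dtype` arguments above are presumably supplied by pytest parametrization; a minimal sketch of decorators that could drive this test (the backend and dtype lists here are assumptions, not taken from the source):

import numpy as np
import pytest

@pytest.mark.parametrize("backend", [None, "numpy", "jax", "tensorflow", "pytorch"])
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_eigh_vs_backend(backend, dtype):
    ...  # body as in the example above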
Example #2
def test_eigh_vs_backend(backend, dtype):
  np.random.seed(10)
  shape = (4, 4)
  dtype = testing_utils.np_dtype_to_backend(backend, dtype)
  tensor = initialize_hermitian_matrix(backend, shape, dtype)
  tn_result = linalg.eigh(tensor)
  if backend is None:
    backend = backend_contextmanager.get_default_backend()
  backend_obj = backends.backend_factory.get_backend(backend)
  backend_result = backend_obj.eigh(tensor.array)
  tn_arrays = [t.array for t in tn_result]
  for tn_arr, backend_arr in zip(tn_arrays, backend_result):
    testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
Example #3
def test_rq_vs_backend(backend, dtype):
    shape = (3, 6, 4, 2)
    dtype = testing_utils.np_dtype_to_backend(backend, dtype)
    tensor = tensornetwork.ones(shape, backend=backend, dtype=dtype)
    split_axis = 1
    tn_result = linalg.rq(tensor, split_axis, non_negative_diagonal=False)
    if backend is None:
        backend = backend_contextmanager.get_default_backend()
    backend_obj = backends.backend_factory.get_backend(backend)
    backend_result = backend_obj.rq(tensor.array, split_axis)
    tn_arrays = [t.array for t in tn_result]
    for tn_arr, backend_arr in zip(tn_arrays, backend_result):
        np.testing.assert_allclose(tn_arr, backend_arr)
Example #4
def test_rq_vs_backend(backend, dtype):
  np.random.seed(10)
  shape = (3, 6, 4, 2)
  dtype = testing_utils.np_dtype_to_backend(backend, dtype)
  tensor = initialize_tensor('randn', backend, shape, dtype)
  split_axis = 1
  tn_result = linalg.rq(tensor, split_axis, non_negative_diagonal=False)
  if backend is None:
    backend = backend_contextmanager.get_default_backend()
  backend_obj = backends.backend_factory.get_backend(backend)
  backend_result = backend_obj.rq(tensor.array, split_axis)
  tn_arrays = [t.array for t in tn_result]
  for tn_arr, backend_arr in zip(tn_arrays, backend_result):
    testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
Example #5
def set_tensornetwork_backend(backend: Optional[str] = None) -> None:
    """
    set the runtime backend of tensornetwork

    :param backend: numpy, tensorflow, jax, pytorch
    :return:
    """
    if not backend:
        backend = get_default_backend()
    backend_obj = get_backend(backend)
    for module in modules:
        if module in sys.modules:
            setattr(sys.modules[module], "backend", backend_obj)
    tn.set_default_backend(backend)
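A minimal usage sketch, assuming set_tensornetwork_backend is importable from the surrounding module: it patches the `backend` attribute of every registered module and updates tensornetwork's default.

set_tensornetwork_backend("jax")  # route subsequent operations through jax
set_tensornetwork_backend()       # no argument: fall back to get_default_backend()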
Example #6
def test_init_tensor_default_backend(dtype):
  """ Creates a numpy array, initializes a Tensor from it, and checks that all
  its members have been correctly initialized.
  """
  backend = backend_contextmanager.get_default_backend()
  backend_obj = backends.backend_factory.get_backend(backend)
  shape = (3, 5, 2)
  testA = backend_obj.zeros(shape, dtype=dtype)
  init = np.zeros(shape, dtype=dtype)
  A = tensornetwork.Tensor(init)
  assert A.backend.name == backend
  np.testing.assert_allclose(A.array, testA)
  assert A.shape == testA.shape
  assert A.size == testA.size
  assert A.ndim == testA.ndim
Example #7
def test_svd_vs_backend(backend, dtype):
  np.random.seed(10)
  shape = (3, 6, 4, 6)
  dtype = testing_utils.np_dtype_to_backend(backend, dtype)
  tensor = initialize_tensor('randn', backend, shape, dtype)
  split_axis = 1
  max_singular_values = 5
  max_trunc_error = 0.1
  relative = True
  tn_result = linalg.svd(tensor, split_axis,
                         max_singular_values=max_singular_values,
                         max_truncation_error=max_trunc_error,
                         relative=relative)
  if backend is None:
    backend = backend_contextmanager.get_default_backend()
  backend_obj = backends.backend_factory.get_backend(backend)
  backend_result = backend_obj.svd(tensor.array, split_axis,
                                   max_singular_values=max_singular_values,
                                   max_truncation_error=max_trunc_error,
                                   relative=relative)
  tn_arrays = [t.array for t in tn_result]
  for tn_arr, backend_arr in zip(tn_arrays, backend_result):
    testing_utils.assert_allclose(tn_arr, backend_arr, backend_obj)
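For orientation, a hedged numpy sketch of the truncation rule these parameters appear to select (semantics inferred from the argument names, not from the library source): keep at most `max_singular_values` values, and with `relative=True` treat `max_trunc_error` as a fraction of the largest singular value.

import numpy as np

def truncate_singular_values(s, max_singular_values, max_truncation_error,
                             relative):
  # s is assumed sorted in descending order.
  # Discarded weight if the trailing k values were dropped, for each k.
  trunc_errs = np.sqrt(np.cumsum(np.square(s[::-1])))
  # relative=True: scale the tolerance by the largest singular value.
  threshold = max_truncation_error * s[0] if relative else max_truncation_error
  # Keep enough values that the discarded weight stays within the tolerance,
  # but never more than max_singular_values.
  num_keep = min(np.count_nonzero(trunc_errs > threshold), max_singular_values)
  return s[:num_keep]

s = np.array([4.0, 2.0, 1.0, 0.5, 0.25, 0.1])
print(truncate_singular_values(s, 5, 0.1, relative=True))  # [4.  2.  1.  0.5]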
Example #8
def initialize_node(fname: Text,
                    *fargs: Any,
                    name: Optional[Text] = None,
                    axis_names: Optional[List[Text]] = None,
                    backend: Optional[Union[Text, BaseBackend]] = None,
                    **fkwargs: Any) -> Tensor:
    """Return a Node wrapping data obtained by an initialization function
  implemented in a backend. The Node will have the same shape as the
  underlying array that function generates, with all Edges dangling.

  This function is not intended to be called directly, but doing so should
  be safe enough.
  Args:
    fname:  Name of the method of backend to call (a string).
    *fargs: Positional arguments to the initialization method.
    name: Optional name of the Node.
    axis_names: Optional names of the Node's dangling edges.
    backend: The backend or its name.
    **fkwargs: Keyword arguments to the initialization method.

  Returns:
    node: A Node wrapping data generated by
          (the_backend).fname(*fargs, **fkwargs), with one dangling edge per
          axis of data.
  """
    if backend is None:
        backend_obj = backend_contextmanager.get_default_backend()
    else:
        backend_obj = backends.backend_factory.get_backend(backend)
    func = getattr(backend_obj, fname)
    data = func(*fargs, **fkwargs)
    node = network_components.Node(data,
                                   name=name,
                                   axis_names=axis_names,
                                   backend=backend)
    return node
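A brief usage sketch consistent with the docstring above (assuming the chosen backend exposes a `zeros` initializer):

# Wraps the array returned by the numpy backend's zeros((2, 3)) in a Node
# with two dangling edges named 'a' and 'b'.
node = initialize_node("zeros", (2, 3), name="my_node",
                       axis_names=["a", "b"], backend="numpy")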
Example #9
    def __init__(
            self,
            tensors: List[Tensor],
            center_position: Optional[int] = None,
            connector_matrix: Optional[Tensor] = None,
            backend: Optional[Union[Text, AbstractBackend]] = None) -> None:
        """Initialize a BaseMPS.

    Args:
      tensors: A list of `Tensor` objects.
      center_position: The initial position of the center site.
      connector_matrix: A `Tensor` of rank 2 connecting
        different unitcells. A value `None` is equivalent to an identity
        `connector_matrix`.
      backend: The name of the backend that should be used to perform
        contractions. Available backends are currently 'numpy', 'tensorflow',
        'pytorch', 'jax'
    """
        if (center_position is not None) and (center_position < 0 or
                                              center_position >= len(tensors)):
            raise ValueError(
                "`center_position = {}` is different from `None` and "
                "not between 0 <= center_position < {}".format(
                    center_position, len(tensors)))
        if backend is None:
            backend = get_default_backend()
        if isinstance(backend, AbstractBackend):
            self.backend = backend
        else:
            self.backend = backend_factory.get_backend(backend)

        # the dtype is deduced from the tensor object.
        self.tensors = [self.backend.convert_to_tensor(t) for t in tensors]
        if not all(
            [self.tensors[0].dtype == tensor.dtype
             for tensor in self.tensors]):
            raise TypeError('not all dtypes in BaseMPS.tensors are the same')

        self.connector_matrix = connector_matrix
        self.center_position = center_position

        ########################################################################
        ##########       define functions for jitted operations       ##########
        ########################################################################
        @partial(jit, backend=self.backend, static_argnums=(1, ))
        def svd(tensor, max_singular_values=None):
            return self.backend.svd(tensor=tensor,
                                    pivot_axis=2,
                                    max_singular_values=max_singular_values)

        self.svd = svd

        @partial(jit, backend=self.backend)
        def qr(tensor):
            return self.backend.qr(tensor, 2)

        self.qr = qr

        @partial(jit, backend=self.backend)
        def rq(tensor):
            return self.backend.rq(tensor, 1)

        self.rq = rq

        self.norm = self.backend.jit(self.backend.norm)
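Once constructed, the jitted helpers are ordinary callables on the instance. A short usage sketch (assuming `mps` is an instance of a concrete subclass, and that the backend's `svd` returns a 4-tuple `(u, s, vh, discarded)`):

theta = mps.tensors[0]                                # a rank-3 site tensor
u, s, vh, _ = mps.svd(theta, max_singular_values=10)  # split at pivot_axis=2
q, r = mps.qr(theta)                                  # QR split at pivot 2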
Example #10
  def __init__(self,
               tensors: List[Tensor],
               center_position: Optional[int] = None,
               connector_matrix: Optional[Tensor] = None,
               backend: Optional[Union[Text, BaseBackend]] = None) -> None:
    """Initialize a BaseMPS.

    Args:
      tensors: A list of `Tensor` objects.
      center_position: The initial position of the center site.
      connector_matrix: A `Tensor` of rank 2 connecting
        different unitcells. A value `None` is equivalent to an identity
        `connector_matrix`.
      backend: The name of the backend that should be used to perform
        contractions. Available backends are currently 'numpy', 'tensorflow',
        'pytorch', 'jax'
    """
    if (center_position is not None) and (center_position < 0 or
                                          center_position >= len(tensors)):
      raise ValueError("`center_position = {}` is different from `None` and "
                       "not between 0 <= center_position < {}".format(
                           center_position, len(tensors)))
    if backend is None:
      backend = get_default_backend()
    if isinstance(backend, BaseBackend):
      self.backend = backend
    else:
      self.backend = backend_factory.get_backend(backend)

    # the dtype is deduced from the tensor object.
    self.tensors = [self.backend.convert_to_tensor(t) for t in tensors]
    if not all(
        [self.tensors[0].dtype == tensor.dtype for tensor in self.tensors]):
      raise TypeError('not all dtypes in BaseMPS.tensors are the same')

    self.connector_matrix = connector_matrix
    self.center_position = center_position

    ########################################################################
    ##########       define functions for jitted operations       ##########
    ########################################################################
    def qr_decomposition(tensor):
      return self.backend.qr_decomposition(tensor, 2)

    self.qr_decomposition = self.backend.jit(qr_decomposition)

    def rq_decomposition(tensor):
      return self.backend.rq_decomposition(tensor, 1)

    self.rq_decomposition = self.backend.jit(rq_decomposition)

    def left_transfer_operator(A, l, Abar):
      return ncon([A, l, Abar], [[1, 2, -1], [1, 3], [3, 2, -2]],
                  backend=self.backend.name)

    self.left_transfer_operator = self.backend.jit(left_transfer_operator)

    def right_transfer_operator(B, r, Bbar):
      return ncon([B, r, Bbar], [[-1, 2, 1], [1, 3], [-2, 2, 3]],
                  backend=self.backend.name)

    self.right_transfer_operator = self.backend.jit(right_transfer_operator)

    def lcontract(R, tensor):
      return ncon([R, tensor], [[-1, 1], [1, -2, -3]],
                  backend=self.backend.name)

    self.lcontract = self.backend.jit(lcontract)

    def rcontract(tensor, R):
      return ncon([tensor, R], [[-1, -2, 1], [1, -3]],
                  backend=self.backend.name)

    self.rcontract = self.backend.jit(rcontract)
    self.norm = self.backend.jit(self.backend.norm)
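For orientation, the `ncon` pattern in `left_transfer_operator` contracts the left virtual indices of `A` and `Abar` through `l` and sums over the shared physical index, leaving both right virtual indices open. A numpy sketch of the same contraction (the shapes are assumptions):

import numpy as np

D, d = 4, 2                    # virtual and physical dimensions (assumed)
A = np.random.randn(D, d, D)
l = np.random.randn(D, D)
Abar = np.random.randn(D, d, D)
# ncon([A, l, Abar], [[1, 2, -1], [1, 3], [3, 2, -2]]) is equivalent to:
E = np.einsum('ija,ik,kjb->ab', A, l, Abar)
print(E.shape)  # (4, 4)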
Example #11
def jit(fun: Callable,
        backend: Optional[Union[Text, AbstractBackend]] = None,
        backend_argnum: Optional[int] = None,
        static_argnums: Union[int, Iterable[int]] = (), device=None,
        xla_backend: Optional[str] = None) -> Callable:
  """
  Return a jitted or graph-compiled version of `fun`
  for JAX backend. For all other backends returns `fun`.
  Args:
    fun: Callable
    backend: The backend.
    backend_argnum: Labels the argument of the decorated function which
                    specifies the backend.
                    This argument will be treated
                    as static in the sense of static_argnums.
                    If backend_argnum is specified, backend must be None.
    static_argnums: Label the arguments which will be statically compiled
                    against.
    xla_backend: Specifies the backend ('gpu', 'cpu'...) against which
                 XLA is to run.
    donate_argnums: Labels arguments that Jit is allowed to overwrite.
    args: Arguments to `fun`.
    kwargs: Keyword arguments to `fun`.

  Raises:
    ValueError: If backend_argnum is specified but backend is not None.

                If backend_argnum is specified but the corresponding
                argument neither is nor labels a backend.
  Returns:
    Callable: jitted/graph-compiled version of `fun`, or just `fun`.
  """
  argnum_mode = False
  if backend_argnum is not None:
    if backend is not None:
      raise ValueError("backend must be None if backend_argnum is specified.")
    argnum_mode = True
    static_argnums = tuple(list(static_argnums) + [backend_argnum,])

  if not argnum_mode:
    if backend is None:
      backend = backend_contextmanager.get_default_backend()
    backend_obj = backends.backend_factory.get_backend(backend)

    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
      jitted = backend_obj.jit(fun, static_argnums=static_argnums,
                               device=device, backend=xla_backend)
      return jitted(*args, **kwargs)
  else:
    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
      backend = args[backend_argnum]
      try:
        backend_obj = backends.backend_factory.get_backend(backend)
      except ValueError as error:
        errstr = (f"backend_argnum={backend_argnum} was specified"
                  f"but the corresponding argument {args[backend_argnum]}"
                  f"did not specify a backend.")
        raise ValueError(errstr) from error
      jitted = backend_obj.jit(fun, static_argnums=static_argnums,
                               device=device, backend=xla_backend)
      return jitted(*args, **kwargs)
  return wrapper
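A minimal usage sketch with a hypothetical function `double` (on non-JAX backends the wrapper effectively passes calls straight through):

import numpy as np

def double(x):
  return x * 2.0

double_jax = jit(double, backend="jax")    # XLA-compiled under jax
double_np = jit(double, backend="numpy")   # numpy: no compilation happens

def scale(x, backend):
  return x * 3.0

# Pick the backend from the call arguments instead, via backend_argnum:
scale_jitted = jit(scale, backend_argnum=1)
scale_jitted(np.ones(3), "numpy")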
Example #12
def ncon(
    tensors: Sequence[Union[tn_tensor.Tensor, Tensor]],
    network_structure: Sequence[Sequence[Union[str, int]]],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    check_network: bool = True,
    backend: Optional[Union[Text, AbstractBackend]] = None
) -> Union[tn_tensor.Tensor, Tensor]:
    r"""Contracts a list of backend-tensors or  `Tensor`s 
    according to a tensor network 
    specification.

    The network is provided as a list of lists, one for each
    tensor, specifying the labels for the edges connected to that tensor.
    
    Labels can be any numbers or strings. Negative number-type labels
    and string-type labels with a prepended hyphen ('-') are open labels
    and remain uncontracted.

    Positive number-type labels and string-type labels with no prepended 
    hyphen ('-') are closed labels and are contracted.

    Any open label appearing more than once is treated as an open 
    batch label. Any closed label appearing more than once is treated as 
    a closed batch label.

    Upon finishing the contraction, all open batch labels will have been 
    collapsed into a single dimension, and all closed batch labels will 
    have been summed over.

    If `out_order = None`, output labels are ordered according to descending
    number ordering and ascending ASCII ordering, with number labels always 
    appearing before string labels. Example:
    network_structure = [[-1, 1, '-rick', '2', -2], [-2, '2', 1, '-morty']]
    results in an output order of [-1, -2, '-morty', '-rick'].

    If `out_order` is given, the indices of the resulting tensor will be
    transposed into this order. 
    
    If `con_order = None`, `ncon` will first contract all number labels 
    in ascending order followed by all string labels in ascending ASCII 
    order.
    If `con_order` is given, `ncon` will contract according to this order.

    For example, matrix multiplication:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      B = np.array([[1.0, 1.0], [0.0, 1.0]])
      ncon([A,B], [(-1, 1), (1, -2)])

    Matrix trace:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      ncon([A], [(1, 1)]) # 5.0

    Note: 
      Disallowing `0` as an edge label is legacy behaviour, see
      `original NCON implementation`_. 
    
    .. _original NCON implementation:
      https://arxiv.org/abs/1402.0939
    
    Args:
      tensors: List of backend-tensors or `Tensor`s.
      network_structure: List of lists specifying the tensor network structure.
      con_order: List of edge labels specifying the contraction order.
      out_order: List of edge labels specifying the output order.
      check_network: Boolean flag. If `True` check the network.
      backend: String specifying the backend to use. Defaults to
        `tensornetwork.backend_contextmanager.get_default_backend`.

    Returns:
      The result of the contraction: 
        * A backend-tensor: If all elements of `tensors` are backend-tensors.
        * A `Tensor`: If all elements of `tensors` are `Tensor` objects.
    """

    # TODO (mganahl): for certain cases np.einsum is still faster than ncon:
    # - contractions containing batched outer products with small dimensions
    # This should eventually be fixed, but it's not a priority.

    if backend is None:
        backend = get_default_backend()
    if isinstance(backend, AbstractBackend):
        backend_obj = backend
    else:
        backend_obj = backend_factory.get_backend(backend)

    if out_order == []:  #allow empty list as input
        out_order = None
    if con_order == []:  #allow empty list as input
        con_order = None

    are_tensors = [isinstance(t, tn_tensor.Tensor) for t in tensors]
    tensors_set = {t for t in tensors if isinstance(t, tn_tensor.Tensor)}
    if not all([n.backend.name == backend_obj.name for n in tensors_set]):
        raise ValueError(
            "Some tensors have backends different from '{}'".format(
                backend_obj.name))

    _tensors = []
    for t in tensors:
        if isinstance(t, tn_tensor.Tensor):
            _tensors.append(t.array)
        else:
            _tensors.append(t)
    _tensors = [backend_obj.convert_to_tensor(t) for t in _tensors]
    if check_network:
        _check_network(network_structure, [t.shape for t in _tensors],
                       con_order, out_order)
    network_structure, mapping = _canonicalize_network_structure(
        network_structure)
    flat_labels = [l for sublist in network_structure for l in sublist]
    unique_flat_labels = list(set(flat_labels))
    if out_order is None:
        # negative batch labels (negative labels appearing more than once)
        # are subject to the same output ordering as regular output labels
        out_order = sorted([l for l in unique_flat_labels if l < 0],
                           reverse=True)
    else:
        out_order = [mapping[o] for o in out_order]
    if con_order is None:
        # canonicalization of network structure takes care of appropriate
        # contraction ordering (i.e. use ASCII ordering for str and
        # regular ordering for int)
        # all positive labels appearing are considered proper contraction labels.
        con_order = sorted([l for l in unique_flat_labels if l > 0])
    else:
        con_order = [mapping[o] for o in con_order]
    if backend not in _CACHED_JITTED_NCONS:
        _CACHED_JITTED_NCONS[backend] = backend_obj.jit(_jittable_ncon,
                                                        static_argnums=(1, 2,
                                                                        3, 4,
                                                                        5))
    sizes = tuple([len(l) for l in network_structure])
    res_tensor = _CACHED_JITTED_NCONS[backend](_tensors, tuple(flat_labels),
                                               sizes, tuple(con_order),
                                               tuple(out_order), backend_obj)
    if all(are_tensors):
        return tn_tensor.Tensor(res_tensor, backend=backend_obj)
    return res_tensor
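A small sketch of the batch-label behaviour described above (numpy backend assumed): an open label repeated across tensors becomes a batch dimension, reproducing a batched matrix product.

import numpy as np

A = np.random.randn(10, 3, 4)
B = np.random.randn(10, 4, 5)
# -1 appears twice as an open label -> treated as an open batch label;
# equivalent to np.einsum('bij,bjk->bik', A, B).
C = ncon([A, B], [[-1, -2, 1], [-1, 1, -3]])
print(C.shape)  # (10, 3, 5)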
Example #13
def ncon(
    tensors: Sequence[Union[network_components.BaseNode, Tensor]],
    network_structure: Sequence[Sequence],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    check_network: bool = True,
    backend: Optional[Union[Text, BaseBackend]] = None
) -> Union[network_components.BaseNode, Tensor]:
  r"""Contracts a list of tensors or nodes according to a tensor network 
    specification.

    The network is provided as a list of lists, one for each
    tensor, specifying labels for the edges connected to that tensor.
    
    Labels appearing only once in `network_structure` (open labels)
    remain uncontracted, labels appearing twice (contracted labels) are
    contracted over. 
    If `out_order = None`, output labels can either be negative numbers or
    strings with a hyphen character ('-') prepended, e.g. '-out_label_1'.
    If `out_order = None` output labels are ordered according to descending
    number ordering and ascending ASCII ordering, with number labels always 
    appearing before string labels. Example:
    network_structure = [[-1, 1, '-3', '2'], [-2, '2', 1, '-33']] results 
    in an output order of [-1, -2, '-3', '-33'].
    If `out_order` is given, the indices of the resulting tensor will be
    transposed into this order. In this case output labels can be arbitrary
    numbers and arbitrary strings (no minus or hyphen necessary).
    
    If `con_order = None`, `ncon` will first contract all number labels 
    in ascending order followed by all string labels in ascending ASCII 
    order.
    If `con_order` is given, `ncon` will contract according to this order.

    For example, matrix multiplication:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      B = np.array([[1.0, 1.0], [0.0, 1.0]])
      ncon([A,B], [(-1, 1), (1, -2)])

    Matrix trace:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      ncon([A], [(1, 1)]) # 5.0

    Note: 
      The reason `0` is not allowed as an edge label without manually
      specifying the contraction order is to maintain compatibility with the
      `original NCON implementation`_. However, the use of `0` in `con_order` 
      to denote outer products is not (currently) 
      supported in this implementation.
    
    .. _original NCON implementation:
      https://arxiv.org/abs/1402.0939

    Args:
      tensors: List of `Tensors` or `BaseNodes`.
      network_structure: List of lists specifying the tensor network structure.
      con_order: List of edge labels specifying the contraction order.
      out_order: List of edge labels specifying the output order.
      check_network: Boolean flag. If `True` check the network.
      backend: String specifying the backend to use. Defaults to
        `tensornetwork.backend_contextmanager.get_default_backend`.

    Returns:
      The result of the contraction. The result is returned as a `Node`
      if all elements of `tensors` are `BaseNode` objects, else
      it is returned as a `Tensor` object.
    """
  if backend is None:
    backend = get_default_backend()
  if isinstance(backend, BaseBackend):
    backend_obj = backend
  else:
    backend_obj = backend_factory.get_backend(backend)

  if out_order == []:  #allow empty list as input
    out_order = None
  if con_order == []:  #allow empty list as input
    con_order = None

  # convert to lists
  network_structure = [list(l) for l in network_structure]
  are_nodes = [isinstance(t, network_components.BaseNode) for t in tensors]
  nodes = {t for t in tensors if isinstance(t, network_components.BaseNode)}
  if not all([n.backend.name == backend_obj.name for n in nodes]):
    raise ValueError("Some nodes have backends different from '{}'".format(
        backend_obj.name))

  _tensors = []
  for t in tensors:
    if isinstance(t, network_components.BaseNode):
      _tensors.append(t.tensor)
    else:
      _tensors.append(t)
  _tensors = [backend_obj.convert_to_tensor(t) for t in _tensors]
  if check_network:
    cont_labels, out_labels = _check_network(network_structure,
                                             [t.shape for t in _tensors],
                                             con_order, out_order)
  else:
    # map the network structure to integers; if any of the labels is a `str`
    # type, the ordering defaults to string-ordering, i.e.
    # [[1, 2, '12'], [1, 9]] -> [[1, -2, -1],[1, -3]]
    # pylint: disable=line-too-long
    int_cont_labels, str_cont_labels, int_out_labels, str_out_labels = _get_cont_out_labels(
        network_structure)
    cont_labels = int_cont_labels + str_cont_labels
    out_labels = int_out_labels + str_out_labels
  network_structure, mapping = _canonicalize_network_structure(
      cont_labels, out_labels, network_structure)

  network_structure = [np.array(l) for l in network_structure]
  flat_connections = np.concatenate(network_structure)
  if out_order is None:
    out_order = np.sort(flat_connections[flat_connections < 0])[::-1]
  else:
    l = []
    for o in out_order:
      l.append(mapping[o])
    out_order = np.array(l)
  if con_order is None:
    #canonicalization of network structure takes care of appropriate
    #contraction ordering (i.e. use ASCII ordering for str and
    #regular ordering for int)
    con_order = np.unique(flat_connections[flat_connections > 0])
  else:
    l = []
    for o in con_order:
      l.append(mapping[o])
    con_order = np.array(l)

  if backend not in _CACHED_JITTED_NCONS:
    _CACHED_JITTED_NCONS[backend] = backend_obj.jit(
        _jittable_ncon, static_argnums=(1, 2, 3, 4))
  res_tensor = _CACHED_JITTED_NCONS[backend](_tensors, network_structure,
                                             con_order, out_order, backend_obj)
  if all(are_nodes):
    return network_components.Node(res_tensor, backend=backend_obj)
  return res_tensor
Example #14
def ncon(
    tensors: Sequence[Union[network_components.BaseNode, Tensor]],
    network_structure: Sequence[Sequence],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    backend: Optional[Union[Text, BaseBackend]] = None
) -> Union[network_components.BaseNode, Tensor]:
    r"""Contracts a list of tensors or nodes according to a tensor network 
    specification.

    The network is provided as a list of lists, one for each
    tensor, specifying labels for the edges connected to that tensor.

    If a contraction order `con_order` and an output order `out_order`
    are both provided, the edge labels can be anything.
    Otherwise (`con_order == None or out_order == None`), the edge labels 
    must be nonzero integers and edges will be contracted in ascending order.
    Negative integers denote the (dangling) indices of the output tensor,
    which will be in descending order, e.g. `[-1,-2,-3,...]`.

    For example, matrix multiplication:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      B = np.array([[1.0, 1.0], [0.0, 1.0]])
      ncon([A,B], [(-1, 1), (1, -2)])

    Matrix trace:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      ncon([A], [(1, 1)]) # 5.0

    Note: 
      The reason `0` is not allowed as an edge label without manually
      specifying the contraction order is to maintain compatibility with the
      `original NCON implementation`_. However, the use of `0` in `con_order` 
      to denote outer products is not (currently) 
      supported in this implementation.
    
    .. _original NCON implementation:
      https://arxiv.org/abs/1402.0939

    Args:
      tensors: List of `Tensors` or `BaseNodes`.
      network_structure: List of lists specifying the tensor network structure.
      con_order: List of edge labels specifying the contraction order.
      out_order: List of edge labels specifying the output order.
      backend: String specifying the backend to use. Defaults to
        `tensornetwork.backend_contextmanager.get_default_backend`.

    Returns:
      The result of the contraction. The result is returned as a `Node`
      if all elements of `tensors` are `BaseNode` objects, else
      it is returned as a `Tensor` object.
    """
    if backend is None:
        backend = get_default_backend()
    if isinstance(backend, BaseBackend):
        backend_obj = backend
    else:
        backend_obj = backend_factory.get_backend(backend)

    are_nodes = [isinstance(t, network_components.BaseNode) for t in tensors]
    nodes = {t for t in tensors if isinstance(t, network_components.BaseNode)}
    if not all([n.backend.name == backend_obj.name for n in nodes]):
        raise ValueError("Some nodes have backends different from '{}'".format(
            backend_obj.name))

    _tensors = []
    for t in tensors:
        if isinstance(t, network_components.BaseNode):
            _tensors.append(t.tensor)
        else:
            _tensors.append(t)

    nodes, con_edges, out_edges = ncon_network(_tensors,
                                               network_structure,
                                               con_order=con_order,
                                               out_order=out_order,
                                               backend=backend_obj)

    nodes = set(nodes)  # we don't need the ordering here

    # Reverse the list so we can pop from the end: O(1).
    con_edges = con_edges[::-1]
    while con_edges:
        nodes_to_contract = con_edges[-1].get_nodes()
        edges_to_contract = network_components.get_shared_edges(
            *nodes_to_contract)

        # Eat up all parallel edges that are adjacent in the ordering.
        adjacent_parallel_edges = set()
        for edge in reversed(con_edges):
            if edge in edges_to_contract:
                adjacent_parallel_edges.add(edge)
            else:
                break
        con_edges = con_edges[:-len(adjacent_parallel_edges)]

        # In an optimal ordering, all edges connecting a given pair of nodes are
        # adjacent in con_order. If this is not the case, warn the user.
        leftovers = edges_to_contract - adjacent_parallel_edges
        if leftovers:
            warnings.warn(
                "Suboptimal ordering detected. Edges {} are not adjacent in the "
                "contraction order to edges {}, connecting nodes {}. Deviating from "
                "the specified ordering!".format(
                    list(map(str, leftovers)),
                    list(map(str, adjacent_parallel_edges)),
                    list(map(str, nodes_to_contract))))
            con_edges = [e for e in con_edges if e not in edges_to_contract]

        if set(nodes_to_contract) == nodes:
            # This contraction produces the final output, so order the edges
            # here to avoid transposes in some cases.
            contraction_output_order = out_edges
        else:
            contraction_output_order = None

        nodes = nodes - set(nodes_to_contract)
        nodes.add(
            network_components.contract_between(
                *nodes_to_contract,
                name="con({},{})".format(*nodes_to_contract),
                output_edge_order=contraction_output_order))

    # TODO: More efficient ordering of products based on out_edges
    res_node = network_components.outer_product_final_nodes(nodes, out_edges)
    if all(are_nodes):
        return res_node
    return res_node.tensor
Example #15
def ncon(
    tensors: Sequence[Union[network_components.BaseNode, Tensor]],
    network_structure: Sequence[Sequence],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    backend: Optional[Union[Text, BaseBackend]] = None
) -> Union[network_components.BaseNode, Tensor]:
    r"""Contracts a list of tensors or nodes according to a tensor network 
    specification.

    The network is provided as a list of lists, one for each
    tensor, specifying labels for the edges connected to that tensor.

    If a contraction order `con_order` and an output order `out_order`
    are both provided, the edge labels can be anything.
    Otherwise (`con_order == None or out_order == None`), the edge labels 
    must be nonzero integers and edges will be contracted in ascending order.
    Negative integers denote the (dangling) indices of the output tensor,
    which will be in descending order, e.g. `[-1,-2,-3,...]`.

    For example, matrix multiplication:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      B = np.array([[1.0, 1.0], [0.0, 1.0]])
      ncon([A,B], [(-1, 1), (1, -2)])

    Matrix trace:

    .. code-block:: python

      A = np.array([[1.0, 2.0], [3.0, 4.0]])
      ncon([A], [(1, 1)]) # 5.0

    Note: 
      The reason `0` is not allowed as an edge label without manually
      specifying the contraction order is to maintain compatibility with the
      `original NCON implementation`_. However, the use of `0` in `con_order` 
      to denote outer products is not (currently) 
      supported in this implementation.
    
    .. _original NCON implementation:
      https://arxiv.org/abs/1402.0939

    Args:
      tensors: List of `Tensors` or `BaseNodes`.
      network_structure: List of lists specifying the tensor network structure.
      con_order: List of edge labels specifying the contraction order.
      out_order: List of edge labels specifying the output order.
      backend: String specifying the backend to use. Defaults to
        `tensornetwork.backend_contextmanager.get_default_backend`.

    Returns:
      The result of the contraction. The result is returned as a `Node`
      if all elements of `tensors` are `BaseNode` objects, else
      it is returned as a `Tensor` object.
    """
    if backend is None:
        backend = get_default_backend()
    if isinstance(backend, BaseBackend):
        backend_obj = backend
    else:
        backend_obj = backend_factory.get_backend(backend)

    are_nodes = [isinstance(t, network_components.BaseNode) for t in tensors]
    nodes = {t for t in tensors if isinstance(t, network_components.BaseNode)}
    if not all([n.backend.name == backend_obj.name for n in nodes]):
        raise ValueError("Some nodes have backends different from '{}'".format(
            backend_obj.name))

    _tensors = []
    for t in tensors:
        if isinstance(t, network_components.BaseNode):
            _tensors.append(t.tensor)
        else:
            _tensors.append(t)

    if backend not in _CACHED_JITTED_NCONS:
        _CACHED_JITTED_NCONS[backend] = backend_obj.jit(_jittable_ncon,
                                                        static_argnums=(1, 2,
                                                                        3, 4))
    res_tensor = _CACHED_JITTED_NCONS[backend](_tensors, network_structure,
                                               con_order, out_order,
                                               backend_obj)
    if all(are_nodes):
        return network_components.Node(res_tensor, backend=backend_obj)
    return res_tensor