def add_node(
    self, tensor: Union[np.ndarray, Tensor], name: Optional[Text] = None,
    axis_names: Optional[List[Text]] = None) -> network_components.Node:
  """Wrap `tensor` in a new node and register it with this network.

  Args:
    tensor: The concrete tensor for the node.
    name: The name of the new node. A name is generated automatically
      when None.
    axis_names: Optional list of strings naming each of the axes.

  Returns:
    The newly created node.

  Raises:
    ValueError: If `name` already exists in the network.
  """
  backend_tensor = self.backend.convert_to_tensor(tensor)
  node_name = self._new_node_name(name)
  if axis_names is None:
    # Generate one edge name per tensor axis.
    rank = len(backend_tensor.shape)
    axis_names = [self._new_edge_name(None) for _ in range(rank)]
  node = network_components.Node(backend_tensor, node_name, axis_names,
                                 self.backend)
  node.set_signature(self.node_increment)
  self.nodes_set.add(node)
  return node
def _build_network(
    tensors: Sequence[Tensor], network_structure: Sequence[Sequence],
    backend: Text
) -> Tuple[List[network_components.BaseNode], Dict[
    Any, network_components.Edge]]:
  """Wrap tensors in nodes and wire them together per `network_structure`.

  Args:
    tensors: Concrete tensors or existing nodes, one per entry of
      `network_structure`.
    network_structure: One list of edge labels per tensor; a label shared
      by two entries connects the corresponding axes.
    backend: Name of the backend used for newly created nodes.

  Returns:
    A tuple of (list of nodes, dict mapping each edge label to its edge).

  Raises:
    ValueError: If a tensor's rank does not match its label count, or if a
      label is reused on an edge that is no longer dangling.
  """
  all_nodes = []
  label_to_edge = {}
  for idx, (tensor, labels) in enumerate(zip(tensors, network_structure)):
    if len(labels) != len(tensor.shape):
      raise ValueError(
          "Incorrect number of edge labels specified tensor {}".format(idx))
    if isinstance(tensor, network_components.BaseNode):
      node = tensor
    else:
      node = network_components.Node(
          tensor, name="tensor_{}".format(idx), backend=backend)
    all_nodes.append(node)
    for axis, label in enumerate(labels):
      if label in label_to_edge:
        # Second occurrence of the label: connect the two edges.
        # This will raise an error if the edges are not dangling.
        label_to_edge[label] = network_components.connect(
            label_to_edge[label], node[axis], name=str(label))
      else:
        # First occurrence: remember the dangling edge under this label.
        dangling = node[axis]
        dangling.set_name(str(label))
        label_to_edge[label] = dangling
  return all_nodes, label_to_edge
def add_node(
    self,
    value: Union[np.ndarray, Tensor, network_components.BaseNode],
    name: Optional[Text] = None,
    axis_names: Optional[List[Text]] = None) -> network_components.BaseNode:
  """Create a new node in the network.

  Args:
    value: Either the concrete tensor or an existing `Node` object that
      has no associated `TensorNetwork`. If a concrete tensor is given,
      a new node will be created.
    name: The name of the new node. If None, a name will be generated
      automatically.
    axis_names: Optional list of strings to name each of the axes.

  Returns:
    The new node object.

  Raises:
    ValueError: If `name` already exists in the network, if the given node
      is already part of a network, or if the given node's backend differs
      from this network's backend.
  """
  # Capture whether the caller passed explicit names BEFORE defaults are
  # generated: for an adopted node, explicit values override the node's own
  # names, while generated defaults only fill in missing ones (see below).
  given_axis_name = axis_names is not None
  given_node_name = name is not None
  if axis_names is None:
    # One auto-generated edge name per tensor axis.
    axis_names = [self._new_edge_name(None) for _ in range(len(value.shape))]
  name = self._new_node_name(name)
  if isinstance(value, network_components.BaseNode):
    # Adopt an existing free-standing node into this network.
    new_node = value
    if new_node.network is not None:
      raise ValueError("Given node is already part of a network.")
    if new_node.backend.name != self.backend.name:
      raise ValueError(
          "Given node '{}' has Node.backend.name='{}' different from TensorNetwork.backend.name='{}'."
          .format(new_node.name, new_node.backend.name, self.backend.name))
    new_node.network = self
    # Only overwrite the node's existing names when the caller supplied
    # explicit ones, or when the node had none to begin with.
    if new_node.axis_names is None or given_axis_name:
      new_node.axis_names = axis_names
    if new_node.name is None or given_node_name:
      new_node.name = name
  else:
    # Concrete tensor: convert to the backend's native representation and
    # wrap it in a fresh Node owned by this network.
    value = self.backend.convert_to_tensor(value)
    if self.backend.dtype is None:
      # The first tensor added sets the backend's dtype.
      self.backend.dtype = value.dtype
    new_node = network_components.Node(
        value, name, axis_names, backend=self.backend.name, network=self)
  new_node.set_signature(self.node_increment)
  self.nodes_set.add(new_node)
  return new_node
def initialize_node(fname: Text,
                    *fargs: Any,
                    name: Optional[Text] = None,
                    axis_names: Optional[List[Text]] = None,
                    backend: Optional[Union[Text, BaseBackend]] = None,
                    **fkwargs: Any) -> network_components.Node:
  """Return a Node wrapping data obtained by an initialization function
  implemented in a backend. The Node will have the same shape as the
  underlying array that function generates, with all Edges dangling.

  This function is not intended to be called directly, but doing so should
  be safe enough.

  Args:
    fname: Name of the method of backend to call (a string).
    *fargs: Positional arguments to the initialization method.
    name: Optional name of the Node.
    axis_names: Optional names of the Node's dangling edges.
    backend: The backend or its name. Defaults to the current default
      backend when None.
    **fkwargs: Keyword arguments to the initialization method.

  Returns:
    node: A Node wrapping data generated by
      (the_backend).fname(*fargs, **fkwargs), with one dangling edge per
      axis of data.
  """
  if backend is None:
    backend_obj = backend_contextmanager.get_default_backend()
  else:
    backend_obj = backends.backend_factory.get_backend(backend)
  func = getattr(backend_obj, fname)
  data = func(*fargs, **fkwargs)
  # Pass the *resolved* backend object so the Node is guaranteed to use the
  # same backend that generated `data`. (Previously the raw `backend`
  # argument -- possibly None -- was forwarded, forcing Node to re-resolve
  # the backend independently.)
  node = network_components.Node(data,
                                 name=name,
                                 axis_names=axis_names,
                                 backend=backend_obj)
  return node
def ncon(
    tensors: Sequence[Union[network_components.AbstractNode, Tensor]],
    network_structure: Sequence[Sequence[Union[str, int]]],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    check_network: bool = True,
    backend: Optional[Union[Text, AbstractBackend]] = None
) -> Union[network_components.AbstractNode, Tensor]:
  r"""Contracts a list of tensors or nodes according to a tensor network
  specification.

  The network is provided as a list of lists, one for each tensor,
  specifying the labels for the edges connected to that tensor.

  Labels can be any numbers or strings. Negative number-type labels and
  string-type labels with a prepended hyphen ('-') are open labels and
  remain uncontracted.

  Positive number-type labels and string-type labels with no prepended
  hyphen ('-') are closed labels and are contracted.

  Any open label appearing more than once is treated as an open batch label.
  Any closed label appearing more than once is treated as a closed batch
  label.

  Upon finishing the contraction, all open batch labels will have been
  collapsed into a single dimension, and all closed batch labels will have
  been summed over.

  If `out_order = None`, output labels are ordered according to descending
  number ordering and ascending ASCII ordering, with number labels always
  appearing before string labels. Example:
  network_structure = [[-1, 1, '-rick', '2',-2], [-2, '2', 1, '-morty']]
  results in an output order of [-1, -2, '-morty', '-rick'].

  If `out_order` is given, the indices of the resulting tensor will be
  transposed into this order.

  If `con_order = None`, `ncon` will first contract all number labels
  in ascending order followed by all string labels in ascending ASCII
  order. If `con_order` is given, `ncon` will contract according to this
  order.

  For example, matrix multiplication:

  .. code-block:: python

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    B = np.array([[1.0, 1.0], [0.0, 1.0]])
    ncon([A,B], [(-1, 1), (1, -2)])

  Matrix trace:

  .. code-block:: python

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    ncon([A], [(1, 1)]) # 5.0

  Note:
    Disallowing `0` as an edge label is legacy behaviour, see
    `original NCON implementation`_.

  .. _original NCON implementation:
    https://arxiv.org/abs/1402.0939

  Args:
    tensors: List of `Tensors` or `AbstractNodes`.
    network_structure: List of lists specifying the tensor network
      structure.
    con_order: List of edge labels specifying the contraction order.
    out_order: List of edge labels specifying the output order.
    check_network: Boolean flag. If `True` check the network.
    backend: String specifying the backend to use. Defaults to
      `tensornetwork.backend_contextmanager.get_default_backend`.

  Returns:
    The result of the contraction. The result is returned as a `Node`
    if all elements of `tensors` are `AbstractNode` objects, else
    it is returned as a `Tensor` object.
  """
  # TODO (mganahl): for certain cases np.einsum is still faster than ncon:
  # - contractions containing batched outer products with small dimensions
  # This should eventually be fixed, but it's not a priority.
  if backend is None:
    backend = get_default_backend()
  if isinstance(backend, AbstractBackend):
    backend_obj = backend
  else:
    backend_obj = backend_factory.get_backend(backend)
  if out_order == []:  # allow empty list as input
    out_order = None
  if con_order == []:  # allow empty list as input
    con_order = None
  are_nodes = [
      isinstance(t, network_components.AbstractNode) for t in tensors
  ]
  nodes = {
      t for t in tensors if isinstance(t, network_components.AbstractNode)
  }
  # Every node input must live on the same backend used for contraction.
  if not all([n.backend.name == backend_obj.name for n in nodes]):
    raise ValueError("Some nodes have backends different from '{}'".format(
        backend_obj.name))
  # Unwrap nodes to their underlying tensors; plain tensors pass through.
  _tensors = []
  for t in tensors:
    if isinstance(t, network_components.AbstractNode):
      _tensors.append(t.tensor)
    else:
      _tensors.append(t)
  _tensors = [backend_obj.convert_to_tensor(t) for t in _tensors]
  if check_network:
    _check_network(network_structure, [t.shape for t in _tensors], con_order,
                   out_order)
  # Map arbitrary int/str labels onto canonical integers; `mapping`
  # translates any caller-supplied labels in con_order/out_order.
  network_structure, mapping = _canonicalize_network_structure(
      network_structure)
  flat_labels = [l for sublist in network_structure for l in sublist]
  unique_flat_labels = list(set(flat_labels))
  if out_order is None:
    # negative batch labels (negative labels appearing more than once)
    # are subject to the same output ordering as regular output labels
    out_order = sorted([l for l in unique_flat_labels if l < 0],
                       reverse=True)
  else:
    out_order = [mapping[o] for o in out_order]
  if con_order is None:
    # canonicalization of network structure takes care of appropriate
    # contraction ordering (i.e. use ASCII ordering for str and
    # regular ordering for int)
    # all positive labels appearing are considered proper contraction labels.
    con_order = sorted([l for l in unique_flat_labels if l > 0])
  else:
    con_order = [mapping[o] for o in con_order]
  # One jitted contraction routine is cached per backend key.
  if backend not in _CACHED_JITTED_NCONS:
    _CACHED_JITTED_NCONS[backend] = backend_obj.jit(
        _jittable_ncon, static_argnums=(1, 2, 3, 4, 5))
  sizes = tuple([len(l) for l in network_structure])
  res_tensor = _CACHED_JITTED_NCONS[backend](_tensors, tuple(flat_labels),
                                             sizes, tuple(con_order),
                                             tuple(out_order), backend_obj)
  # Wrap the result in a Node only when every input was a node.
  if all(are_nodes):
    return network_components.Node(res_tensor, backend=backend_obj)
  return res_tensor
def ncon(
    tensors: Sequence[Union[network_components.BaseNode, Tensor]],
    network_structure: Sequence[Sequence],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    check_network: bool = True,
    backend: Optional[Union[Text, BaseBackend]] = None
) -> Union[network_components.BaseNode, Tensor]:
  r"""Contracts a list of tensors or nodes according to a tensor network
  specification.

  The network is provided as a list of lists, one for each tensor,
  specifying labels for the edges connected to that tensor.

  Labels appearing only once in `network_structure` (open labels) remain
  uncontracted, labels appearing twice (contracted labels) are contracted
  over.

  If `out_order = None`, output labels can either be negative numbers or
  strings with a hyphen character ('-') prepended, e.g. '-out_label_1'.
  If `out_order = None` output labels are ordered according to descending
  number ordering and ascending ASCII ordering, with number labels always
  appearing before string labels. Example:
  network_structure = [[-1, 1, '-3', '2'], [-2, '2', 1, '-33']]
  results in an output order of [-1, -2, '-3', '-33'].

  If `out_order` is given, the indices of the resulting tensor will be
  transposed into this order. In this case output labels can be arbitrary
  numbers and arbitrary strings (no minus or hyphen necessary).

  If `con_order = None`, `ncon` will first contract all number labels
  in ascending order followed by all string labels in ascending ASCII
  order. If `con_order` is given, `ncon` will contract according to this
  order.

  For example, matrix multiplication:

  .. code-block:: python

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    B = np.array([[1.0, 1.0], [0.0, 1.0]])
    ncon([A,B], [(-1, 1), (1, -2)])

  Matrix trace:

  .. code-block:: python

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    ncon([A], [(1, 1)]) # 5.0

  Note:
    The reason `0` is not allowed as an edge label without manually
    specifying the contraction order is to maintain compatibility with the
    `original NCON implementation`_. However, the use of `0` in `con_order`
    to denote outer products is not (currently) supported in this
    implementation.

  .. _original NCON implementation:
    https://arxiv.org/abs/1402.0939

  Args:
    tensors: List of `Tensors` or `BaseNodes`.
    network_structure: List of lists specifying the tensor network
      structure.
    con_order: List of edge labels specifying the contraction order.
    out_order: List of edge labels specifying the output order.
    check_network: Boolean flag. If `True` check the network.
    backend: String specifying the backend to use. Defaults to
      `tensornetwork.backend_contextmanager.get_default_backend`.

  Returns:
    The result of the contraction. The result is returned as a `Node`
    if all elements of `tensors` are `BaseNode` objects, else
    it is returned as a `Tensor` object.
  """
  if backend is None:
    backend = get_default_backend()
  if isinstance(backend, BaseBackend):
    backend_obj = backend
  else:
    backend_obj = backend_factory.get_backend(backend)
  if out_order == []:  # allow empty list as input
    out_order = None
  if con_order == []:  # allow empty list as input
    con_order = None
  # convert to lists
  network_structure = [list(l) for l in network_structure]
  are_nodes = [isinstance(t, network_components.BaseNode) for t in tensors]
  nodes = {t for t in tensors if isinstance(t, network_components.BaseNode)}
  # Every node input must live on the same backend used for contraction.
  if not all([n.backend.name == backend_obj.name for n in nodes]):
    raise ValueError("Some nodes have backends different from '{}'".format(
        backend_obj.name))
  # Unwrap nodes to their underlying tensors; plain tensors pass through.
  _tensors = []
  for t in tensors:
    if isinstance(t, network_components.BaseNode):
      _tensors.append(t.tensor)
    else:
      _tensors.append(t)
  _tensors = [backend_obj.convert_to_tensor(t) for t in _tensors]
  if check_network:
    # _check_network also returns the contracted/output label partition.
    cont_labels, out_labels = _check_network(network_structure,
                                             [t.shape for t in _tensors],
                                             con_order, out_order)
  else:
    # map the network structure to integers; if any of the labels is a
    # `str` type, the ordering defaults to string-ordering, i.e.
    # [[1, 2, '12'], [1, 9]] -> [[1, -2, -1],[1, -3]]
    # pylint: disable=line-too-long
    int_cont_labels, str_cont_labels, int_out_labels, str_out_labels = _get_cont_out_labels(
        network_structure)
    cont_labels = int_cont_labels + str_cont_labels
    out_labels = int_out_labels + str_out_labels
  network_structure, mapping = _canonicalize_network_structure(
      cont_labels, out_labels, network_structure)
  # Work on numpy arrays from here on for fast masking/sorting of labels.
  network_structure = [np.array(l) for l in network_structure]
  flat_connections = np.concatenate(network_structure)
  if out_order is None:
    # Negative (output) labels, descending order.
    out_order = np.sort(flat_connections[flat_connections < 0])[::-1]
  else:
    # Translate caller-supplied labels through the canonical mapping.
    l = []
    for o in out_order:
      l.append(mapping[o])
    out_order = np.array(l)
  if con_order is None:
    # canonicalization of network structure takes care of appropriate
    # contraction ordering (i.e. use ASCII ordering for str and
    # regular ordering for int)
    con_order = np.unique(flat_connections[flat_connections > 0])
  else:
    # Translate caller-supplied labels through the canonical mapping.
    l = []
    for o in con_order:
      l.append(mapping[o])
    con_order = np.array(l)
  # One jitted contraction routine is cached per backend key.
  if backend not in _CACHED_JITTED_NCONS:
    _CACHED_JITTED_NCONS[backend] = backend_obj.jit(
        _jittable_ncon, static_argnums=(1, 2, 3, 4))
  res_tensor = _CACHED_JITTED_NCONS[backend](_tensors, network_structure,
                                             con_order, out_order,
                                             backend_obj)
  # Wrap the result in a Node only when every input was a node.
  if all(are_nodes):
    return network_components.Node(res_tensor, backend=backend_obj)
  return res_tensor
def ncon(
    tensors: Sequence[Union[network_components.BaseNode, Tensor]],
    network_structure: Sequence[Sequence],
    con_order: Optional[Sequence] = None,
    out_order: Optional[Sequence] = None,
    backend: Optional[Union[Text, BaseBackend]] = None
) -> Union[network_components.BaseNode, Tensor]:
  r"""Contract tensors or nodes according to a tensor network specification.

  `network_structure` holds one label list per tensor, naming the edges
  attached to that tensor. When both `con_order` and `out_order` are
  provided, edge labels can be anything. Otherwise labels must be nonzero
  integers: positive labels are contracted in ascending order, and negative
  labels denote the (dangling) indices of the output tensor, ordered
  descendingly, e.g. `[-1,-2,-3,...]`.

  For example, matrix multiplication:

  .. code-block:: python

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    B = np.array([[1.0, 1.0], [0.0, 1.0]])
    ncon([A,B], [(-1, 1), (1, -2)])

  Matrix trace:

  .. code-block:: python

    A = np.array([[1.0, 2.0], [3.0, 4.0]])
    ncon([A], [(1, 1)]) # 5.0

  Note:
    `0` is not allowed as an edge label without a manually specified
    contraction order, for compatibility with the
    `original NCON implementation`_. Using `0` in `con_order` to denote
    outer products is not (currently) supported in this implementation.

  .. _original NCON implementation:
    https://arxiv.org/abs/1402.0939

  Args:
    tensors: List of `Tensors` or `BaseNodes`.
    network_structure: List of lists specifying the tensor network
      structure.
    con_order: List of edge labels specifying the contraction order.
    out_order: List of edge labels specifying the output order.
    backend: String specifying the backend to use. Defaults to
      `tensornetwork.backend_contextmanager.get_default_backend`.

  Returns:
    The result of the contraction, as a `Node` if every element of
    `tensors` was a `BaseNode` object, otherwise as a `Tensor` object.
  """
  if backend is None:
    backend = get_default_backend()
  backend_obj = (backend if isinstance(backend, BaseBackend) else
                 backend_factory.get_backend(backend))
  are_nodes = [isinstance(t, network_components.BaseNode) for t in tensors]
  node_inputs = {
      t for t in tensors if isinstance(t, network_components.BaseNode)
  }
  # Every node input must live on the contraction backend.
  if any(n.backend.name != backend_obj.name for n in node_inputs):
    raise ValueError("Some nodes have backends different from '{}'".format(
        backend_obj.name))
  # Unwrap nodes to their underlying tensors; plain tensors pass through.
  raw_tensors = [
      t.tensor if isinstance(t, network_components.BaseNode) else t
      for t in tensors
  ]
  # One jitted contraction routine is cached per backend key.
  if backend not in _CACHED_JITTED_NCONS:
    _CACHED_JITTED_NCONS[backend] = backend_obj.jit(
        _jittable_ncon, static_argnums=(1, 2, 3, 4))
  res_tensor = _CACHED_JITTED_NCONS[backend](raw_tensors, network_structure,
                                             con_order, out_order,
                                             backend_obj)
  # Wrap the result in a Node only when every input was a node.
  return (network_components.Node(res_tensor, backend=backend_obj)
          if all(are_nodes) else res_tensor)