Example #1
def naive(
    net: network.TensorNetwork,
    edge_order: Optional[Sequence[network_components.Edge]] = None
) -> network.TensorNetwork:
    """Contract a TensorNetwork in the order the edges were created.

  This contraction method will usually be very suboptimal unless the edges were
  created in a deliberate way.

  Args:
    net: A TensorNetwork.
    edge_order: An optional list of edges. Must be equal to all non-dangling
      edges in the net.
  Returns:
    The given TensorNetwork with all non-dangling edges contracted.
  Raises:
    ValueError: If any of the edges originally created by `connect` have been
      contracted or flattened.
  """
    if edge_order is None:
        edge_order = net.edge_order
    if set(edge_order) != net.get_all_nondangling():
        raise ValueError(
            "Some non-dangling edges that were orginally created by "
            "`connect` are no longer in the graph. Please do NOT use"
            " any edge manipulation methods (contract, flatten, "
            "split_node, etc) before using the naive contractor.\n"
            "Original edges missing: {}.\n"
            "New edges found: {}".format(
                set(edge_order) - net.get_all_nondangling(),
                net.get_all_nondangling() - set(edge_order)))
    for edge in edge_order:
        if edge in net:
            net.contract_parallel(edge)
    return net
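A minimal usage sketch for `naive` on a two-node network; it assumes the `TensorNetwork` construction API shown in the test example further below (the NumPy backend and tensor shapes are illustrative).
import numpy as np

net = TensorNetwork(backend="numpy")
a = net.add_node(np.random.randn(2, 3))
b = net.add_node(np.random.randn(3, 4))
net.connect(a[1], b[0])                  # the only non-dangling edge
net = naive(net)                         # contracts edges in creation order
result = net.get_final_node().tensor     # (2, 4) array: the matrix product of the two tensors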
Example #2
def auto(net: network.TensorNetwork,
         memory_limit: Optional[int] = None) -> network.TensorNetwork:
  """Chooses one of the above algorithms according to network size.

  Default behavior is based on `opt_einsum`'s `auto` contractor.

  Args:
    net: a TensorNetwork object.
    memory_limit: Maximum number of elements in an array during contractions.

  Returns:
    The network after full contraction.
  """
  n = len(net.nodes_set)
  if n <= 0:
    raise ValueError("Cannot contract empty tensor network.")
  if n == 1:
    edges = net.get_all_nondangling()
    net.contract_parallel(edges.pop())
    return net
  if n < 5:
    return optimal(net, memory_limit)
  if n < 7:
    return branch(net, memory_limit)
  if n < 9:
    return branch(net, memory_limit, nbranch=2)
  if n < 15:
    return branch(net, nbranch=1)
  return greedy(net, memory_limit)
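A small sketch of how the dispatch plays out in practice: for a closed three-node ring (n = 3 < 5) the call ends up in `optimal`. The construction API is assumed to be the one used in the test example further below; shapes are illustrative.
import numpy as np

net = TensorNetwork(backend="numpy")
nodes = [net.add_node(np.random.randn(2, 2)) for _ in range(3)]
net.connect(nodes[0][1], nodes[1][0])
net.connect(nodes[1][1], nodes[2][0])
net.connect(nodes[2][1], nodes[0][0])    # closed ring: no dangling edges
net = auto(net)                          # n = 3, so this dispatches to `optimal`
result = net.get_final_node().tensor     # scalar: the trace of the matrix product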
Example #3
def bucket(net: network.TensorNetwork,
           contraction_order: Sequence[network_components.CopyNode]
) -> network.TensorNetwork:
  """Contract given tensor network exploiting copy tensors.

  This is based on the Bucket-Elimination-based algorithm described in
  arXiv:1712.05384, but avoids explicit construction of the graphical
  model. Instead, it achieves the efficient contraction of sparse tensors by
  representing them as subnetworks consisting of lower rank tensors and copy
  tensors. This function assumes that sparse tensors have already been
  decomposed this way by the caller.

  This contractor is efficient on networks with many copy tensors. Time and
  memory requirements are highly sensitive to the requested contraction order.

  Note that the returned tensor network may not be fully contracted if the input
  network doesn't have enough copy nodes. In this case, the client should use
  a different contractor to complete the contraction.

  Args:
    net: A TensorNetwork.
    contraction_order: Order in which copy tensors are contracted.

  Returns:
    The given TensorNetwork with all copy tensors contracted.
  """
  for copy_node in contraction_order:
    net.contract_copy_node(copy_node)
  return net
Example #4
def naive(
    net: network.TensorNetwork,
    edge_order: Optional[Sequence[network_components.Edge]] = None
) -> network.TensorNetwork:
    """Contract a TensorNetwork in the order the edges were created.

  This contraction method will usually be very suboptimal unless the edges were
  created in a deliberate way.

  Args:
    net: A TensorNetwork.
    edge_order: An optional list of edges. Must be equal to all non-dangling
      edges in the net.
  Returns:
    The given TensorNetwork with all non-dangling edges contracted.
  Raises:
    ValueError: If any of the edges originally created by `connect` have been
      contracted or flattened.
  """
    if edge_order is None:
        edge_order = sorted(net.get_all_nondangling())
    if set(edge_order) != net.get_all_nondangling():
        raise ValueError("Set of passed edges does not match expected set.\n"
                         "Given: {}\nExpected: {}".format(
                             edge_order, net.get_all_nondangling()))
    for edge in edge_order:
        if edge in net:
            net.contract_parallel(edge)
    return net
Example #5
def add_cnot(net: network.TensorNetwork, q0: network_components.Edge,
             q1: network_components.Edge
            ) -> Tuple[network_components.CopyNode, network_components.Edge,
                       network_components.Edge]:
  """Adds the CNOT quantum gate to tensor network.

  CNOT consists of two rank-3 tensors: a COPY tensor on the control qubit and
  a XOR tensor on the target qubit.

  Args:
    net: Tensor network to add CNOT to.
    q0: Input edge for the control qubit.
    q1: Input edge for the target qubit.

  Returns:
    Tuple with three elements:
    - copy tensor corresponding to the control qubit
    - output edge for the control qubit and
    - output edge for the target qubit.
  """
  control = net.add_copy_node(rank=3, dimension=2)
  xor = np.array([[[1, 0], [0, 1]], [[0, 1], [1, 0]]], dtype=np.float64)
  target = net.add_node(xor)
  net.connect(q0, control[0])
  net.connect(q1, target[0])
  net.connect(control[1], target[1])
  return control, control[2], target[2]
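A sketch of using `add_cnot` together with the `bucket` contractor from Example #3: apply CNOT to the basis state |10> and contract the copy tensor first, then finish the remaining dense edge with the same pattern `naive` uses. The qubit state vectors are illustrative, and the dangling output edges are assumed to survive the intermediate contractions (as in the `reorder_edges` usage elsewhere in these examples).
import numpy as np

net = TensorNetwork(backend="numpy")
q0 = net.add_node(np.array([0.0, 1.0]))              # control qubit in state |1>
q1 = net.add_node(np.array([1.0, 0.0]))              # target qubit in state |0>
copy_node, q0_out, q1_out = add_cnot(net, q0[0], q1[0])
net = bucket(net, [copy_node])       # contract the copy tensor efficiently
for edge in net.get_all_nondangling():
    if edge in net:                  # finish off the remaining dense edge
        net.contract_parallel(edge)
out = net.get_final_node()
out.reorder_edges([q0_out, q1_out])
print(out.tensor)                    # amplitudes of CNOT|10> = |11>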
Example #6
def test_backend_network(backend):
    a = np.random.randn(2, 2, 2)
    nodes, _, _ = ncon_interface.ncon_network([a, a, a],
                                              [(-1, 1, 2), (1, 2, 3),
                                               (3, -2, -3)],
                                              backend=backend)

    net = TensorNetwork(backend=backend)
    # pylint: disable=expression-not-assigned
    [net.add_node(n) for n in nodes]
    res = naive(net).get_final_node().tensor
    res_np = a.reshape((2, 4)) @ a.reshape((4, 2)) @ a.reshape((2, 4))
    res_np = res_np.reshape((2, 2, 2))
    np.testing.assert_allclose(res, res_np)
Example #7
def stochastic(net: network.TensorNetwork,
               max_rejections: int,
               threshold: Optional[int] = None,
               none_value: int = 1) -> network.TensorNetwork:
    """Contracts a connected network by stochastically picking edges.

  Algorithm 2 on page 7 of https://doi.org/10.1371/journal.pone.0208510.
  Cost calculation is slightly modified here:
  If A and B are the tensors that share the given `edge`, cost is defined as:
  cost = dims(A * B) - max(dims(A), dims(B)), where
  * denotes contraction of all shared edges (`contract_parallel`) and
  dims(X) is the total dimension of tensor X (product of sizes of all axes).

  Args:
    net: Connected TensorNetwork to contract fully.
    max_rejections: Maximum number of rejections before the threshold is
      doubled.
    threshold: Initial value for the threshold.
    none_value: The value of None dimensions in the cost calculation.

  Returns:
    net: TensorNetwork with a single node after fully contracting.
  """
    net, node_sizes, node_sizes_none = contract_trace_edges(net, none_value)
    if threshold is None:
        # Set threshold as the maximum tensor size in the network
        # ignoring nodes with None sizes.
        threshold = max(node_sizes.values())
    node_sizes.update(node_sizes_none)

    rejections = 0
    nondangling_edges = net.get_all_nondangling()
    while nondangling_edges:
        edge = random.choice(tuple(nondangling_edges))
        shared_edges, shared_dim = find_parallel(edge)
        new_dim = ((node_sizes[edge.node1] // shared_dim) *
                   (node_sizes[edge.node2] // shared_dim))
        cost = new_dim - max(node_sizes[edge.node1], node_sizes[edge.node2])
        if cost <= threshold:
            node_sizes.pop(edge.node1)
            node_sizes.pop(edge.node2)
            node_sizes[net.contract_parallel(edge)] = new_dim
            nondangling_edges -= shared_edges
            rejections = 0
        else:
            rejections += 1
            if rejections > max_rejections:
                threshold *= 2
                rejections = 0
    return net
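A minimal usage sketch for `stochastic` on a two-node network with two shared edges; the value of `max_rejections` and the tensor shapes are illustrative.
import numpy as np

net = TensorNetwork(backend="numpy")
a = net.add_node(np.random.randn(2, 3, 4))
b = net.add_node(np.random.randn(3, 4, 5))
net.connect(a[1], b[0])                  # shared dimension 3
net.connect(a[2], b[1])                  # shared dimension 4
net = stochastic(net, max_rejections=10)
result = net.get_final_node().tensor     # shape (2, 5), up to axis order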
Example #8
def _get_path_network(net: TensorNetwork, algorithm: Algorithm
                     ) -> Tuple[List[Tuple[int, int]], List[BaseNode]]:
  """Calculates the contraction paths using `opt_einsum` methods.

  Args:
    net: TensorNetwork object to contract.
    algorithm: `opt_einsum` method to use for calculating the contraction path.

  Returns:
    A tuple of the contraction path (as returned by `opt_einsum`) and the
    list of nodes sorted by signature, which the path indices refer to.
  """
  sorted_nodes = sorted(net.nodes_set, key=lambda n: n.signature)

  input_sets = [set(node.edges) for node in sorted_nodes]
  output_set = net.get_all_edges() - net.get_all_nondangling()
  size_dict = {edge: edge.dimension for edge in net.get_all_edges()}

  return algorithm(input_sets, output_set, size_dict), sorted_nodes
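A sketch of feeding this helper an `opt_einsum` path algorithm. It assumes `opt_einsum` is installed and that its `paths.greedy` function accepts a `memory_limit` keyword; the small chain network is only there to make the snippet self-contained.
import functools
import numpy as np
import opt_einsum

net = TensorNetwork(backend="numpy")
a = net.add_node(np.random.randn(2, 3))
b = net.add_node(np.random.randn(3, 4))
c = net.add_node(np.random.randn(4, 5))
net.connect(a[1], b[0])
net.connect(b[1], c[0])

algorithm = functools.partial(opt_einsum.paths.greedy, memory_limit=None)
path, sorted_nodes = _get_path_network(net, algorithm)
# `path` is a list of index pairs into the current node list, in the same
# convention `opt_einsum` uses for contraction paths.
for a_idx, b_idx in path:
    print(a_idx, b_idx)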
Example #9
def contract_trace_edges(
    net: network.TensorNetwork,
    none_value: int = 1
) -> Tuple[network.TensorNetwork, Dict[network_components.BaseNode, int], Dict[
        network_components.BaseNode, int]]:
    """Contracts trace edges and calculate tensor sizes for every node.

  Tensor size is defined as the product of sizes of each of edges (axes).

  Args:
    net: TensorNetwork to contract all the trace edges of.
    none_value: The value that None dimensions contribute to the tensor size.
      Unit (default) means that None dimensions are neglected.

  Returns:
    A tuple containing:
      net: 
        Given TensorNetwork with all its trace edges contracted.
      node_sizes: 
        Map from nodes in the network to their total size.
      node_sizes_none: 
        Map from nodes that have at least one None dimension to
        their size.
  """
    # Keep node sizes in memory for cost calculation
    node_sizes, node_sizes_none = dict(), dict()
    initial_node_set = set(net.nodes_set)
    for node in initial_node_set:
        trace_edges, flag_none, total_dim = set(), False, 1
        new_node = node
        # makes sure node_edges points to the original edges
        # even after contracting the trace
        # pylint: disable=unnecessary-comprehension
        node_edges = [e for e in node.edges]
        node_dims = list(node.get_tensor().shape)
        for edge, dim in zip(node_edges, node_dims):
            if (not edge.is_disabled) and (edge.node1 is edge.node2):
                if edge not in trace_edges:
                    # Contract trace edge
                    new_node = net.contract(edge, name=node.name)
                    trace_edges.add(edge)
            elif edge.is_disabled:  #edge has been contracted; skip it
                continue
            else:
                if dim is None:
                    total_dim *= none_value
                    flag_none = True
                else:
                    total_dim *= dim
        # Record the size once per node, after all of its edges were examined.
        if flag_none:
            node_sizes_none[new_node] = total_dim
        else:
            node_sizes[new_node] = total_dim
    return net, node_sizes, node_sizes_none
Example #10
def base(net: network.TensorNetwork,
         algorithm: Callable[[List[Set[int]], Set[int], Dict[int, int]], List]
        ) -> network.TensorNetwork:
  """Base method for all `opt_einsum` contractors.

  Args:
    net: a TensorNetwork object. Should be connected.
    algorithm: `opt_einsum` contraction method to use.

  Returns:
    The network after full contraction.
  """
  net.check_connected()
  # First contract all trace edges
  edges = net.get_all_nondangling()
  for edge in edges:
    if edge in net and edge.is_trace():
      net.contract_parallel(edge)
  if not net.get_all_nondangling():
    # There's nothing to contract.
    return net

  # Then apply `opt_einsum`'s algorithm
  nodes = sorted(net.nodes_set)
  input_sets = utils.get_input_sets(net)
  output_set = utils.get_output_set(net)
  size_dict = utils.get_size_dict(net)
  path = algorithm(input_sets, output_set, size_dict)
  for a, b in path:
    new_node = nodes[a] @ nodes[b]
    nodes.append(new_node)
    nodes = utils.multi_remove(nodes, [a, b])
  return net
Example #11
def contract_trace_edges(
    net: network.TensorNetwork,
    none_value: int = 1
) -> Tuple[network.TensorNetwork, Dict[network_components.Node, int], Dict[
        network_components.Node, int]]:
    """Contracts trace edges and calculate tensor sizes for every node.

  Tensor size is defined as the product of sizes of each of edges (axes).

  Args:
    net: TensorNetwork to contract all the trace edges of.
    none_value: The value that None dimensions contribute to the tensor size.
      Unit (default) means that None dimensions are neglected.

  Returns:
    A tuple containing:
      net: 
        Given TensorNetwork with all its trace edges contracted.
      node_sizes: 
        Map from nodes in the network to their total size.
      node_sizes_none: 
        Map from nodes that have at least one None dimension to
        their size.
  """
    # Keep node sizes in memory for cost calculation
    node_sizes, node_sizes_none = dict(), dict()
    initial_node_set = set(net.nodes_set)
    for node in initial_node_set:
        trace_edges, flag_none, total_dim = set(), False, 1
        new_node = node
        for edge, dim in zip(node.edges, list(node.get_tensor().shape)):
            if edge.node1 is edge.node2:
                if edge not in trace_edges:
                    # Contract trace edge
                    new_node = net.contract(edge)
                    trace_edges.add(edge)
            else:
                if dim is None:
                    total_dim *= none_value
                    flag_none = True
                else:
                    total_dim *= dim
        # Record the size once per node, after all of its edges were examined.
        if flag_none:
            node_sizes_none[new_node] = total_dim
        else:
            node_sizes[new_node] = total_dim
    return net, node_sizes, node_sizes_none
Example #12
def load(path: str):
    """Load a tensor network from disk.

  Args:
    path: Path to the file where the network is saved.

  Returns:
    The loaded TensorNetwork.
  """
    with h5py.File(path, 'r') as net_file:
        net = TensorNetwork(backend=net_file["backend"][()])
        node_names = list(net_file["nodes"].keys())
        edge_names = list(net_file["edges"].keys())

        for node_name in node_names:
            node_data = net_file["nodes/" + node_name]
            node_type = get_component(node_data['type'][()])
            node_type._load_node(net, node_data)

        nodes_dict = {node.name: node for node in net.nodes_set}

        for edge in edge_names:
            edge_data = net_file["edges/" + edge]
            Edge._load_edge(edge_data, nodes_dict)
    return net
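A short usage sketch; the file path is illustrative and the file is assumed to have been produced by the library's matching save routine.
net = load("my_network.h5")
result = naive(net).get_final_node().tensor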
Example #13
def greedy(net: network.TensorNetwork) -> network.TensorNetwork:
  """Contract the lowest cost pair of nodes first.
  
  Args:
    net: The TensorNetwork to contract.

  Returns:
    The contracted TensorNetwork.
  """
  edges = net.get_all_nondangling()
  # First, contract all of the trace edges.
  for edge in edges:
    if edge in net and edge.is_trace():
      net.contract_parallel(edge)
  # Get the edges again.
  edges = net.get_all_nondangling()
  while edges:
    edge = min(edges, key=lambda x: (cost_contract_parallel(x), x))
    net.contract_parallel(edge)
    edges = net.get_all_nondangling()
  return net
Example #14
def auto(net: network.TensorNetwork,
         output_edge_order: Optional[Sequence[network_components.Edge]] = None,
         memory_limit: Optional[int] = None) -> network.TensorNetwork:
    """Chooses one of the above algorithms according to network size.

  Default behavior is based on `opt_einsum`'s `auto` contractor.

  Args:
    net: a TensorNetwork object.
    output_edge_order: An optional list of edges. The edges of the final node
      in `nodes_set` are reordered into `output_edge_order`; if the final node
      has more than one dangling edge, `output_edge_order` must be provided.
    memory_limit: Maximum number of elements in an array during contractions.

  Returns:
    The network after full contraction.
  """
    n = len(net.nodes_set)
    if n <= 0:
        raise ValueError("Cannot contract empty tensor network.")
    if n == 1:
        edges = net.get_all_nondangling()
        net.contract_parallel(edges.pop())
        final_node = net.get_final_node()
        if (len(final_node.edges) <= 1) and (output_edge_order is None):
            output_edge_order = list(
                (net.get_all_edges() - net.get_all_nondangling()))
        elif (len(final_node.edges) > 1) and (output_edge_order is None):
            raise ValueError(
                "if the final node has more than one dangling edge"
                ", `output_edge_order` has to be provided")

        final_node.reorder_edges(output_edge_order)
        return net
    if n < 5:
        return optimal(net, output_edge_order, memory_limit)
    if n < 7:
        return branch(net, output_edge_order, memory_limit)
    if n < 9:
        return branch(net, output_edge_order, memory_limit, nbranch=2)
    if n < 15:
        return branch(net, output_edge_order, nbranch=1)
    return greedy(net, output_edge_order, memory_limit)
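A sketch of the `output_edge_order` argument: the final node below keeps two dangling edges, so the order must be given explicitly (backend and shapes are illustrative).
import numpy as np

net = TensorNetwork(backend="numpy")
a = net.add_node(np.random.randn(2, 3))
b = net.add_node(np.random.randn(3, 4))
net.connect(a[1], b[0])
net = auto(net, output_edge_order=[a[0], b[1]])
result = net.get_final_node().tensor     # shape (2, 4)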
Example #15
def get_size_dict(net: network.TensorNetwork) -> Dict[int, int]:
    return {edge: edge.dimension for edge in net.get_all_edges()}
Example #16
def base(
    net: network.TensorNetwork,
    algorithm: Callable[[List[Set[int]], Set[int], Dict[int, int]], List],
    output_edge_order: Optional[Sequence[network_components.Edge]] = None
) -> network.TensorNetwork:
    """Base method for all `opt_einsum` contractors.

  Args:
    net: a TensorNetwork object. Should be connected.
    algorithm: `opt_einsum` contraction method to use.
    output_edge_order: An optional list of edges. The edges of the final node
      in `nodes_set` are reordered into `output_edge_order`; if the final node
      has more than one dangling edge, `output_edge_order` must be provided.
  Returns:
    The network after full contraction.
  """

    net.check_connected()
    # First contract all trace edges
    edges = net.get_all_nondangling()
    for edge in edges:
        if edge in net and edge.is_trace():
            net.contract_parallel(edge)
    if not net.get_all_nondangling():
        # There's nothing to contract.
        return net

    # Then apply `opt_einsum`'s algorithm
    nodes = sorted(net.nodes_set)
    input_sets = utils.get_input_sets(net)
    output_set = utils.get_output_set(net)
    size_dict = utils.get_size_dict(net)
    path = algorithm(input_sets, output_set, size_dict)
    for a, b in path:
        new_node = nodes[a] @ nodes[b]
        nodes.append(new_node)
        nodes = utils.multi_remove(nodes, [a, b])

    # if the final node has more than one edge,
    # output_edge_order has to be specified
    final_node = net.get_final_node()
    if (len(final_node.edges) <= 1) and (output_edge_order is None):
        output_edge_order = list(
            (net.get_all_edges() - net.get_all_nondangling()))
    elif (len(final_node.edges) > 1) and (output_edge_order is None):
        raise ValueError("if the final node has more than one dangling edge"
                         " `output_edge_order` has to be provided")

    if set(output_edge_order) != (net.get_all_edges() -
                                  net.get_all_nondangling()):
        raise ValueError("output edges are not all dangling.")

    final_node.reorder_edges(output_edge_order)
    return net
Example #17
def _base_network(
        net: TensorNetwork,
        algorithm: utils.Algorithm,
        output_edge_order: Optional[Sequence[Edge]] = None) -> TensorNetwork:
    """Base method for all `opt_einsum` contractors.

  Args:
    net: a TensorNetwork object. Should be connected.
    algorithm: `opt_einsum` contraction method to use.
    output_edge_order: An optional list of edges. The edges of the final node
      in `nodes_set` are reordered into `output_edge_order`; if the final node
      has more than one dangling edge, `output_edge_order` must be provided.

  Returns:
    The network after full contraction.
  """
    net.check_connected()
    # First contract all trace edges
    edges = net.get_all_nondangling()
    for edge in edges:
        if edge in net and edge.is_trace():
            net.contract_parallel(edge)
    if not net.get_all_nondangling():
        # There's nothing to contract.
        return net

    # Then apply `opt_einsum`'s algorithm
    path, nodes = utils.get_path(net, algorithm)
    for a, b in path:
        new_node = nodes[a] @ nodes[b]
        nodes.append(new_node)
        nodes = utils.multi_remove(nodes, [a, b])

    # if the final node has more than one edge,
    # output_edge_order has to be specified
    final_node = net.get_final_node()
    if (len(final_node.edges) <= 1) and (output_edge_order is None):
        output_edge_order = list(
            (net.get_all_edges() - net.get_all_nondangling()))
    elif (len(final_node.edges) > 1) and (output_edge_order is None):
        raise ValueError("The final node after contraction has more than "
                         "one dangling edge. In this case `output_edge_order` "
                         "has to be provided.")
    if set(output_edge_order) != (net.get_all_edges() -
                                  net.get_all_nondangling()):
        raise ValueError("output edges are not all dangling.")

    final_node.reorder_edges(output_edge_order)
    return net
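For context, the named contractors are typically thin wrappers that bind an `opt_einsum` path algorithm and delegate to this base. The sketch below illustrates the pattern; the wrapper names are illustrative, not the library's literal definitions, and it assumes `opt_einsum.paths.optimal` and `opt_einsum.paths.greedy` accept a `memory_limit` keyword.
import functools
import opt_einsum


def optimal_network(net, output_edge_order=None, memory_limit=None):
  # Exhaustive search over contraction orders; only feasible for small nets.
  alg = functools.partial(opt_einsum.paths.optimal, memory_limit=memory_limit)
  return _base_network(net, alg, output_edge_order)


def greedy_network(net, output_edge_order=None, memory_limit=None):
  # Cheapest-pair-first heuristic; scales to much larger networks.
  alg = functools.partial(opt_einsum.paths.greedy, memory_limit=memory_limit)
  return _base_network(net, alg, output_edge_order)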
Example #18
def get_output_set(net: network.TensorNetwork) -> Set[int]:
    dangling_edges = net.get_all_edges() - net.get_all_nondangling()
    return set(dangling_edges)