def test_edge_initialize_raises_error_faulty_arguments(double_node_edge):
  node1 = double_node_edge.node1
  node2 = double_node_edge.node2
  with pytest.raises(ValueError):
    Edge(name="edge", node1=node1, node2=node2, axis1=0)
  with pytest.raises(ValueError):
    Edge(name="edge", node1=node1, axis1=0, axis2=0)

def test_edge_magic_xor(double_node_edge):
  node1 = double_node_edge.node1
  node2 = double_node_edge.node2
  edge1 = Edge(name="edge1", node1=node1, axis1=2)
  edge2 = Edge(name="edge2", node1=node2, axis1=2)
  edge = edge1 ^ edge2
  assert edge.node1 == node1
  assert edge.node2 == node2

def test_node_add_edge_raises_error_mismatch_rank(single_node_edge):
  node = single_node_edge.node
  edge = single_node_edge.edge
  with pytest.raises(ValueError):
    node.add_edge(edge, axis=-1)
  edge = Edge(name="edge", node1=node, axis1=0)
  with pytest.raises(ValueError):
    node.add_edge(edge, axis=3)

@pytest.fixture(name="double_node_edge")
def fixture_double_node_edge(backend):
  net = tensornetwork.TensorNetwork(backend=backend)
  tensor = net.backend.convert_to_tensor(np.ones((1, 2, 2)))
  node1 = Node(
      tensor=tensor,
      name="test_node1",
      axis_names=["a", "b", "c"],
      network=net,
      backend=backend)
  node2 = Node(
      tensor=tensor,
      name="test_node2",
      axis_names=["a", "b", "c"],
      network=net,
      backend=backend)
  net.connect(node1["b"], node2["b"])
  edge1 = Edge(name="edge", node1=node1, axis1=0)
  edge12 = Edge(name="edge", node1=node1, axis1=1, node2=node2, axis2=1)
  return DoubleNodeEdgeTensor(node1, node2, edge1, edge12, tensor)

def test_node_reorder_edges_raise_error_wrong_edges(single_node_edge):
  node = single_node_edge.node
  e0 = node[0]
  e1 = node[1]
  e2 = node[2]
  edge = Edge(name="edge", node1=node, axis1=0)
  with pytest.raises(ValueError) as e:
    node.reorder_edges([e0])
  assert "Missing edges that belong to node found:" in str(e.value)
  with pytest.raises(ValueError) as e:
    node.reorder_edges([e0, e1, e2, edge])
  assert "Additional edges that do not belong to node found:" in str(e.value)

@pytest.fixture(name="single_node_edge")
def fixture_single_node_edge(backend):
  net = tensornetwork.TensorNetwork(backend=backend)
  tensor = np.ones((1, 2, 2))
  tensor = net.backend.convert_to_tensor(tensor)
  node = Node(
      tensor=tensor,
      name="test_node",
      axis_names=["a", "b", "c"],
      backend=backend,
      network=net)
  edge = Edge(name="edge", node1=node, axis1=0)
  return SingleNodeEdgeTensor(node, edge, tensor)

def test_edge_load(tmp_path, double_node_edge):
  edge = double_node_edge.edge12
  with h5py.File(tmp_path / 'edge', 'w') as edge_file:
    edge_group = edge_file.create_group('edge_data')
    edge_group.create_dataset('signature', data=edge.signature)
    edge_group.create_dataset('name', data=edge.name)
    edge_group.create_dataset('node1', data=edge.node1.name)
    edge_group.create_dataset('node2', data=edge.node2.name)
    edge_group.create_dataset('axis1', data=edge.axis1)
    edge_group.create_dataset('axis2', data=edge.axis2)

    net = tensornetwork.TensorNetwork(backend=edge.node1.network.backend.name)
    ten = net.backend.convert_to_tensor(np.ones((1, 2, 2)))
    node1 = Node(
        tensor=2 * ten,
        name="test_node1",
        axis_names=["a", "b", "c"],
        network=net,
        backend=net.backend.name)
    node2 = Node(
        tensor=ten,
        name="test_node2",
        axis_names=["a", "b", "c"],
        network=net,
        backend=net.backend.name)
    loaded_edge = Edge._load_edge(edge_group, {
        node1.name: node1,
        node2.name: node2
    })
    assert loaded_edge.name == edge.name
    assert loaded_edge.signature == edge.signature
    assert loaded_edge.node1.name == edge.node1.name
    assert loaded_edge.node2.name == edge.node2.name
    assert loaded_edge.axis1 == edge.axis1
    assert loaded_edge.axis2 == edge.axis2
    np.testing.assert_allclose(loaded_edge.node1.tensor, node1.tensor)
    np.testing.assert_allclose(loaded_edge.node2.tensor, node2.tensor)

def batched_contract_between(
    self, node1: tensornetwork.Node, node2: tensornetwork.Node,
    batch_edge1: tensornetwork.Edge,
    batch_edge2: tensornetwork.Edge) -> tensornetwork.Node:
  """Contract between that supports one batch edge in each node.

  Uses the einsum property: "bij,bjk->bik".

  Args:
    node1: First node to contract.
    node2: Second node to contract.
    batch_edge1: The edge of node1 that corresponds to its batch index.
    batch_edge2: The edge of node2 that corresponds to its batch index.

  Returns:
    new_node: Result of the contraction. By default this node has
      batch_edge1 as its batch edge. Its edges are in the order of the
      dangling edges of node1 followed by the dangling edges of node2.
  """
  if node1 is node2:
    raise ValueError("Cannot perform batched contraction between "
                     "node '{}' and itself.".format(node1))
  shared_edges = self.get_shared_edges(node1, node2)
  if not shared_edges:
    raise ValueError("No edges found between nodes "
                     "'{}' and '{}'".format(node1, node2))
  if batch_edge1 in shared_edges:
    raise ValueError(
        "Batch edge '{}' is shared between the nodes".format(batch_edge1))
  if batch_edge2 in shared_edges:
    raise ValueError(
        "Batch edge '{}' is shared between the nodes".format(batch_edge2))

  n_shared = len(shared_edges)
  shared_subscripts = dict(
      zip(shared_edges, self._VALID_SUBSCRIPTS[:n_shared]))

  res_string, string = [], []
  index = n_shared + 1
  for node, batch_edge in zip([node1, node2], [batch_edge1, batch_edge2]):
    string.append([])
    for edge in node.edges:
      if edge in shared_edges:
        string[-1].append(shared_subscripts[edge])
      elif edge is batch_edge:
        string[-1].append(self._VALID_SUBSCRIPTS[n_shared])
        if node is node1:
          res_string.append(self._VALID_SUBSCRIPTS[n_shared])
      else:
        string[-1].append(self._VALID_SUBSCRIPTS[index])
        res_string.append(self._VALID_SUBSCRIPTS[index])
        index += 1

  string1 = "".join(string[0])
  string2 = "".join(string[1])
  res_string = "".join(res_string)
  einsum_string = "".join([string1, ",", string2, "->", res_string])
  new_tensor = self.backend.einsum(einsum_string, node1.tensor, node2.tensor)
  new_node = self.add_node(new_tensor)

  # Modify batch_edge2 so it counts as a shared (non-dangling) edge;
  # otherwise _remove_edges would raise a ValueError.
  batch_edge2.node2 = node1
  batch_edge2._is_dangling = False
  shared_edges.add(batch_edge2)
  self._remove_edges(shared_edges, node1, node2, new_node)
  return new_node

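# Illustrative usage sketch for batched_contract_between (not part of the
# original code). It assumes `net` is an instance of the BatchTensorNetwork
# class referenced by pairwise_reduction below, and that it exposes the
# standard TensorNetwork `add_node`/`connect` API used elsewhere in this
# module. The helper name and the shapes are hypothetical.
def _example_batched_contract_between(net):
  import numpy as np

  # Two nodes with batch dimension 10 on axis 0 and a contracted dimension
  # of size 3: (b, i, j) x (b, j, k) -> (b, i, k).
  a = net.add_node(np.ones((10, 2, 3)))
  b = net.add_node(np.ones((10, 3, 4)))
  net.connect(a[2], b[1])  # the shared (contracted) edge "j"
  # The batch edges are a[0] and b[0]; the result keeps a[0] as its batch
  # edge, so result.tensor should have shape (10, 2, 4).
  return net.batched_contract_between(a, b, a[0], b[0])
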
def pairwise_reduction(net: BatchTensorNetwork, node: tensornetwork.Node,
                       edge: tensornetwork.Edge) -> tensornetwork.Node:
  """Parallel contraction of matrix chains.

  The operation performed by this function is described in Fig. 4 of the
  paper `TensorNetwork for Machine Learning`. It leads to a more efficient
  implementation of the MPS classifier, both for predictions and for
  automatic gradient calculation.

  The idea is that the whole MPS side is kept in memory as one node that
  carries an artificial "space" edge. This function removes this additional
  index by performing the pairwise contractions shown in the figure.

  Args:
    net: TensorNetwork that contains the node we want to reduce.
    node: Node to reduce pairwise. The corresponding tensor should have the
      form (..., space edge, ..., a, b); matrix multiplications are
      performed over the last two indices using matmul.
    edge: Space edge of the node.

  Returns:
    node: Node after the reduction. Has the shape of the given node with
      `edge` removed.
  """
  # NOTE: This function could be included in the BatchTensorNetwork class;
  # however, it seems better to keep it separate because (at least with the
  # current implementation) it performs a very specialized/non-general
  # operation. It also uses tf.matmul, which restricts the backend, but this
  # can easily be generalized since all the backends support batched matmul.
  if not edge.is_dangling():
    raise ValueError("Cannot reduce non-dangling edge '{}'".format(edge))
  if edge.node1 is not node:
    raise ValueError("Edge '{}' does not belong to node '{}'".format(
        edge, node))

  tensor = node.tensor
  size = int(tensor.shape[edge.axis1])

  # Bring the reduction edge to the first position.
  edge_order = list(range(len(list(tensor.shape))))
  edge_order[0] = edge.axis1
  edge_order[edge.axis1] = 0
  tensor = net.backend.transpose(tensor, edge_order)

  # Remove the edge to be reduced from the node and shift the axes of the
  # remaining edges accordingly.
  node.edges.pop(edge.axis1)
  for e in node.edges[edge.axis1:]:
    if e.node1 is e.node2:
      raise NotImplementedError("Cannot binary reduce node "
                                "'{}' with trace edge '{}'".format(node, e))
    if e.node1 is node:
      e.axis1 -= 1
    else:
      e.axis2 -= 1

  # The idea for this implementation is from jemisjoky/TorchMPS.
  while size > 1:
    half_size = size // 2
    nice_size = 2 * half_size
    leftover = tensor[nice_size:]
    tensor = tf.matmul(tensor[0:nice_size:2], tensor[1:nice_size:2])
    tensor = net.backend.concat([tensor, leftover], axis=0)
    size = half_size + int(size % 2 == 1)

  node.tensor = tensor[0]
  return node

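# Illustrative usage sketch for pairwise_reduction (the helper below is
# hypothetical and not part of the original code). It assumes `net` is a
# BatchTensorNetwork whose backend is TensorFlow, since pairwise_reduction
# calls tf.matmul, and that the node's leading axis is the artificial
# "space" edge holding a chain of square matrices.
def _example_pairwise_reduction(net):
  import numpy as np

  # A chain of 7 random 4x4 matrices stored along the dangling "space" edge
  # (axis 0). pairwise_reduction multiplies neighbouring pairs in O(log n)
  # rounds instead of contracting the chain sequentially.
  node = net.add_node(np.random.randn(7, 4, 4).astype(np.float32))
  reduced = pairwise_reduction(net, node, node[0])
  # reduced.tensor should now have shape (4, 4): the ordered product of the
  # 7 matrices, with the space edge removed from the node.
  return reduced
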
def test_edge_is_being_used_false(single_node_edge):
  node = single_node_edge.node
  edge2 = Edge(name="edge", node1=node, axis1=0)
  assert not edge2.is_being_used()

def test_edge_is_trace_true(single_node_edge):
  node = single_node_edge.node
  edge = Edge(name="edge", node1=node, axis1=1, node2=node, axis2=2)
  assert edge.is_trace()