Example #1
 def decode(self, code, syndrome, **kwargs):
     """See :meth:`qecsim.model.Decoder.decode`"""
     # prepare recovery
     recovery_pauli = code.new_pauli()
     # ask code for plaquette_indices
     plaquette_indices = code.syndrome_to_plaquette_indices(syndrome)
     # for each lattice
     for lattice in (code.PRIMAL_INDEX, code.DUAL_INDEX):
         # prepare lattice graph
         l_graph = gt.SimpleGraph()
         # select lattice plaquettes
         l_plaquette_indices = [(la, r, c) for la, r, c in plaquette_indices
                                if la == lattice]
         # add weighted edges to lattice graph
         for a_index, b_index in itertools.combinations(
                 l_plaquette_indices, 2):
             # add edge with taxi-cab distance between a and b
             l_graph.add_edge(a_index, b_index,
                              self.distance(code, a_index, b_index))
         # find MWPM edges {(a, b), (c, d), ...}
         l_mates = gt.mwpm(l_graph)
         # iterate edges
         for a_index, b_index in l_mates:
             # add path to recovery
             recovery_pauli.path(a_index, b_index)
     # return recovery as bsf
     return recovery_pauli.to_bsf()
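
The taxi-cab distance referred to in the comments is computed by self.distance(code, a_index, b_index), which the snippet does not show. The helper below is a rough sketch only (the name taxi_cab_distance_sketch, the (rows, columns) lattice_shape parameter, and the wrap-around convention are illustrative assumptions, not qecsim's actual implementation):

def taxi_cab_distance_sketch(lattice_shape, a_index, b_index):
    """Hypothetical helper: minimum taxi-cab distance between two plaquette
    indices (lattice, row, column) on a periodic lattice of (rows, columns)
    shape. Illustrative only."""
    rows, cols = lattice_shape
    _, a_r, a_c = a_index
    _, b_r, b_c = b_index
    # row and column separations, allowing wrap-around at the periodic boundaries
    d_r = abs(a_r - b_r)
    d_c = abs(a_c - b_c)
    return min(d_r, rows - d_r) + min(d_c, cols - d_c)
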
Example #2
    def _cluster_graph(cls, code, time_steps, clusters):
        """Graph of cluster nodes and weighted edges consistent with the syndrome.

        Notes:

        * By construction, each cluster is either neutral (i.e. all defects fused) or defective (i.e. exactly one
          non-fused Y defect).
        * If there are no defective clusters an empty graph is returned.
        * One node is added for each defective cluster.
        * Two nodes are added for each neutral cluster, provided that cluster consists of both X and Z plaquettes.
        * No node is added for any cluster that contains only X or only Z plaquettes.
        * Edges are added between all nodes with weight given by the cluster_distance function.

        :param code: Rotated planar code.
        :type code: RotatedPlanarCode
        :param time_steps: Number of time steps.
        :type time_steps: int
        :param clusters: List of clusters (directed paths of indices) as
            [[(t1, x1, y1), (t2, x2, y2), ..., (tn, xn, yn)], ...].
        :type clusters: list of list of (int, int, int)
        :return: Graph of weighted edges between cluster nodes, as {(a_node, b_node): weight, ...}.
        :rtype: dict of (_ClusterNode, _ClusterNode) edges to float weights.
        """
        # empty graph
        graph = gt.SimpleGraph()
        # build cluster nodes
        defective_cluster_nodes = []
        neutral_cluster_nodes = []
        for cluster in clusters:
            # split into x_path, z_path, y_defect
            x_path, z_path, y_defect = cls._cluster_to_paths_and_defect(
                code, cluster)
            # add cluster nodes to graph
            if y_defect:  # if cluster has non-fused Y-defect
                # unpack Y-defect indices
                x_defect_index, z_defect_index = y_defect
                # add as defective cluster
                cluster_node = cls._ClusterNode(cluster, x_defect_index,
                                                z_defect_index)
                defective_cluster_nodes.append(cluster_node)
            elif x_path and z_path:  # elif cluster has fused Y-defects
                # add twice as neutral cluster with representative X and Z indices
                neutral_cluster_nodes.append(
                    cls._ClusterNode(cluster, x_path[0], z_path[0]))
                neutral_cluster_nodes.append(
                    cls._ClusterNode(cluster, x_path[0], z_path[0]))
            else:  # else cluster has no Y-defects (fused or non-fused) so skip
                pass
        # if no defective cluster nodes then return empty graph
        if not defective_cluster_nodes:
            return graph
        # we should have an even number of defective cluster nodes
        assert len(defective_cluster_nodes) % 2 == 0
        # add edges to graph
        for a_node, b_node in itertools.combinations(
                defective_cluster_nodes + neutral_cluster_nodes, 2):
            graph.add_edge(
                a_node, b_node,
                cls._cluster_distance(code, time_steps, a_node, b_node))
        return graph
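
Note that two identical nodes are appended for each neutral cluster, so the _ClusterNode objects must remain distinct graph keys; this suggests a plain container that relies on default object identity for equality and hashing. The class below is a hypothetical stand-in for illustration only, not the decoder's actual _ClusterNode:

class ClusterNodeSketch:
    """Hypothetical stand-in for _ClusterNode: a container for a cluster and its
    representative X and Z plaquette indices that keeps default object identity
    for equality/hashing, so duplicate neutral nodes stay distinct in the graph."""

    def __init__(self, cluster, x_index, z_index):
        self.cluster = cluster  # directed path of (t, x, y) indices
        self.x_index = x_index  # representative X-plaquette index
        self.z_index = z_index  # representative Z-plaquette index
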
Example #3
def test_simple_graph():
    graph = gt.SimpleGraph()
    graph.add_edge('a', 'b', 4)
    assert graph == {('a', 'b'): 4}
    graph.add_edge('a', 'b', 5)
    assert graph == {('a', 'b'): 5}
    graph.add_edge('b', 'a', 6)
    assert graph == {('b', 'a'): 6}
    graph.add_edge('a', 'b', 7)
    graph.add_edge('b', 'c', 8)
    assert graph == {('a', 'b'): 7, ('b', 'c'): 8}
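
The behaviour this test pins down, undirected edges keyed by node pair where a later add_edge in either orientation replaces the existing edge and its key, can be reproduced with a small dict subclass. The sketch below is illustrative only and is not the qecsim.graphtools.SimpleGraph implementation:

class SimpleGraphSketch(dict):
    """Illustrative dict of weighted undirected edges keyed by node pair."""

    def add_edge(self, a_node, b_node, weight):
        # discard any existing edge between the two nodes, in either orientation
        self.pop((a_node, b_node), None)
        self.pop((b_node, a_node), None)
        # store the edge keyed in the orientation given by the caller
        self[(a_node, b_node)] = weight

Substituting this sketch for gt.SimpleGraph would satisfy the same assertions as above, since dict equality compares the stored edge keys and weights directly.
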
Example #4
 def decode(self, code, syndrome, **kwargs):
     """See :meth:`qecsim.model.Decoder.decode`"""
     # prepare recovery
     recovery_pauli = code.new_pauli()
     # get syndrome indices
     syndrome_indices = code.syndrome_to_plaquette_indices(syndrome)
     # split indices into primal and dual
     primal_indices = [i for i in syndrome_indices if code.is_primal(i)]
     dual_indices = [i for i in syndrome_indices if code.is_dual(i)]
     # extra virtual indices are deliberately well off-boundary to be separate from nearest virtual indices
     primal_extra_vindex = (-9, -10)
     dual_extra_vindex = (-10, -9)
     # for each type of indices and extra virtual index
     for indices, extra_vindex in (primal_indices, primal_extra_vindex), (dual_indices, dual_extra_vindex):
         # prepare graph
         graph = gt.SimpleGraph()
         # prepare virtual nodes
         vindices = set()
         # add weighted edges between nodes and virtual nodes
         for index in indices:
             vindex = code.virtual_plaquette_index(index)
             vindices.add(vindex)
             distance = self.distance(code, index, vindex)
             graph.add_edge(index, vindex, distance)
         # add extra virtual node if odd number of total nodes
         if (len(indices) + len(vindices)) % 2:
             vindices.add(extra_vindex)
         # add weighted edges to graph between all (non-virtual) nodes
         for a_index, b_index in itertools.combinations(indices, 2):
             distance = self.distance(code, a_index, b_index)
             graph.add_edge(a_index, b_index, distance)
         # add zero weight edges between all virtual nodes
         for a_index, b_index in itertools.combinations(vindices, 2):
             graph.add_edge(a_index, b_index, 0)
         # find MWPM edges {(a, b), (c, d), ...}
         mates = gt.mwpm(graph)
         # iterate edges
         for a_index, b_index in mates:
             # add path to recovery
             recovery_pauli.path(a_index, b_index)
     # return recovery as bsf
     return recovery_pauli.to_bsf()
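
gt.mwpm is expected to return a minimum-weight perfect matching of the weighted graph as a set of node pairs. Purely to illustrate one way such a matching could be obtained (not necessarily how qecsim does it), the sketch below negates the weights and asks networkx for a maximum-weight matching of maximum cardinality; mwpm_sketch is a hypothetical name:

import networkx as nx


def mwpm_sketch(graph):
    """Illustrative minimum-weight perfect matching over a
    {(a_node, b_node): weight, ...} graph, returned as a set of node pairs."""
    nx_graph = nx.Graph()
    for (a_node, b_node), weight in graph.items():
        # negate weights so a maximum-weight matching minimises total weight
        nx_graph.add_edge(a_node, b_node, weight=-weight)
    # maxcardinality=True insists on matching as many nodes as possible
    return set(nx.max_weight_matching(nx_graph, maxcardinality=True))
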
Example #5
    def _cluster_graph(cls, code, time_steps, clusters):
        """Graph of cluster nodes and weighted edges consistent with the syndrome.

        Notes:

        * By construction, each cluster is either neutral (i.e. all defects fused) or defective (i.e. exactly one
          non-fused Y defect).
        * If there are no defective clusters an empty graph is returned.

        Algorithm: see class doc.

        :param code: Rotated planar code.
        :type code: RotatedPlanarCode
        :param time_steps: Number of time steps.
        :type time_steps: int
        :param clusters: List of clusters (directed paths of indices) as
            [[(t1, x1, y1), (t2, x2, y2), ..., (tn, xn, yn)], ...].
        :type clusters: list of list of (int, int, int)
        :return: Graph of weighted edges between cluster nodes, as {(a_node, b_node): weight, ...}.
        :rtype: dict of (_ClusterNode, _ClusterNode) edges to float weights.
        """
        # empty graph
        graph = gt.SimpleGraph()
        # build cluster nodes
        defective_cluster_nodes = []
        neutral_cluster_nodes = []
        for cluster in clusters:
            # split into x_path, z_path, y_defect
            x_path, z_path, y_defect = cls._cluster_to_paths_and_defect(code, cluster)
            # add cluster nodes to graph
            if y_defect:  # if cluster has non-fused Y-defect
                # unpack Y-defect indices
                x_defect_index, z_defect_index = y_defect
                # add as defective cluster
                cluster_node = cls._ClusterNode(cluster, x_defect_index, z_defect_index)
                defective_cluster_nodes.append(cluster_node)
            elif x_path and z_path:  # elif cluster has fused Y-defects
                # add twice as neutral cluster with representative X and Z indices
                neutral_cluster_nodes.append(cls._ClusterNode(cluster, x_path[0], z_path[0]))
                neutral_cluster_nodes.append(cls._ClusterNode(cluster, x_path[0], z_path[0]))
            else:  # else cluster has no Y-defects (fused or non-fused) so skip
                pass
        # if no defective cluster nodes then return empty graph
        if not defective_cluster_nodes:
            return graph
        # define extra virtual node (to join with corners) if odd number of cluster nodes
        extra_virtual_node = cls._ClusterNode(is_virtual=True) if len(defective_cluster_nodes) % 2 else None
        # add defective virtual clusters at corners with edge to extra virtual cluster
        for (x_x, x_y), (z_x, z_y) in cls._cluster_corner_indices(code):
            # loop through time
            for t in range(time_steps):
                x_index, z_index = (t, x_x, x_y), (t, z_x, z_y)
                corner_virtual_node = cls._ClusterNode([x_index, z_index], x_index, z_index, is_virtual=True)
                defective_cluster_nodes.append(corner_virtual_node)
                if extra_virtual_node:
                    # add virtual corner node and virtual extra node to graph with zero distance
                    graph.add_edge(corner_virtual_node, extra_virtual_node, 0)
        # add edges to graph
        for a_node, b_node in itertools.combinations(defective_cluster_nodes + neutral_cluster_nodes, 2):
            graph.add_edge(a_node, b_node, cls._cluster_distance(time_steps, a_node, b_node))
        return graph
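
The extra virtual node is there to keep the matching well posed: a perfect matching pairs every node, so the graph passed to the matcher needs an even number of nodes. The check below is an illustrative sanity test of that requirement over a {(a_node, b_node): weight, ...} graph, not part of the decoder:

def has_even_node_count_sketch(graph):
    """Illustrative only: collect the distinct nodes of a
    {(a_node, b_node): weight, ...} graph and check there is an even number
    of them, as a perfect matching requires."""
    nodes = {node for edge in graph for node in edge}
    return len(nodes) % 2 == 0
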
Example #6
    def _graph(cls, code, time_steps, syndrome, error_probability=None, measurement_error_probability=None, eta=None):
        """Graph of plaquette nodes and weighted edges consistent with the syndrome.

        Algorithm: see class doc.

        :param code: Rotated planar code.
        :type code: RotatedPlanarCode
        :param time_steps: Number of time steps.
        :type time_steps: int
        :param syndrome: Syndrome as binary array with (t, x, y) dimensions.
        :type syndrome: numpy.array (2d)
        :param error_probability: Error probability
            (optional if equal to measurement_error_probability and eta is None).
        :type error_probability: float or None
        :param measurement_error_probability: Measurement error probability
            (optional if equal to error_probability and eta is None).
        :type measurement_error_probability: float or None
        :param eta: Bias (a positive finite number or None for infinite bias), i.e. p_y / (p_x + p_z).
        :type eta: float or None
        :return: Graph of weighted edges between plaquette nodes, consistent with the syndrome,
            as {(((a_t, a_x, a_y), a_is_row), ((b_t, b_x, b_y), b_is_row)): weight, ...}.
        :rtype: dict of (((int, int, int), bool), ((int, int, int), bool)) edges to float weights.
        """
        # empty graph
        graph = gt.SimpleGraph()
        # get syndrome indices, as list of set where syndrome_indices[t] corresponds to time t
        syndrome_indices = [code.syndrome_to_plaquette_indices(s) for s in syndrome]
        # all plaquettes as (x, y)
        plaquette_indices = cls._plaquette_indices(code)

        def _add_edge(a_node, b_node):
            # unpack nodes
            ((a_t, a_x, a_y), a_is_row), ((b_t, b_x, b_y), b_is_row) = a_node, b_node
            # do not add edge between orthogonals
            if a_is_row != b_is_row:
                return
            # do not add edge between time steps if measurement probability is 0 or 1
            if measurement_error_probability in (0, 1) and a_t != b_t:
                return
            # do not add edge between space steps if error_probability is 0
            if error_probability == 0 and (a_x, a_y) != (b_x, b_y):
                return
            # do not add edge between distinct parallels if eta is None
            if eta is None and ((a_is_row and a_y != b_y) or (not a_is_row and a_x != b_x)):
                return
            # add edge to graph
            graph.add_edge(a_node, b_node, cls._distance(code, time_steps, a_node, b_node, error_probability,
                                                         measurement_error_probability, eta))

        def _add_to_graph(by_row):
            """Loop through lines of plaquette_indices adding nodes consistent with syndrome_indices with edges weighted
            according to distance function. by_row=True/False means process rows/columns."""

            # lattice_nodes (only populated for finite bias, i.e. eta is not None)
            lattice_nodes = []
            # loop through lines (rows if by_row, cols if not by_row)
            for line in plaquette_indices if by_row else plaquette_indices.T:
                # line list of nodes
                line_nodes = []
                # loop through indices on line
                for (x, y) in line:
                    # loop through time
                    for t in range(time_steps):
                        if code.is_virtual_plaquette((x, y)):
                            # add virtual node to line list
                            v_node = ((t, x, y), by_row)
                            line_nodes.append(v_node)
                            # add virtual node and orthogonal twin to graph with zero distance
                            v_node_twin = ((t, x, y), not by_row)
                            graph.add_edge(v_node, v_node_twin, 0)
                        else:
                            # if index in syndrome
                            if (x, y) in syndrome_indices[t]:
                                # add real node to line list
                                r_node = ((t, x, y), by_row)
                                line_nodes.append(r_node)
                if eta is None:  # if infinite bias
                    # add line edges to graph
                    for a_node, b_node in itertools.combinations(line_nodes, 2):
                        _add_edge(a_node, b_node)
                else:  # else finite bias
                    # add line nodes to lattice nodes
                    lattice_nodes.extend(line_nodes)
            # if bias is not infinite and we have some lattice nodes
            if eta and lattice_nodes:
                # add lattice edges to graph (note: lattice_nodes is empty if infinite bias)
                for a_node, b_node in itertools.combinations(lattice_nodes, 2):
                    _add_edge(a_node, b_node)

        # add nodes by row
        _add_to_graph(by_row=True)
        # add nodes by column
        _add_to_graph(by_row=False)
        return graph
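
The loops iterate plaquette_indices directly for rows and via .T for columns, which suggests a 2-D numpy object array whose entries are (x, y) index tuples. The builder below is a hypothetical sketch of such an array; the particular (x, y) layout shown is arbitrary and for illustration only:

import numpy as np


def plaquette_index_array_sketch(rows, cols):
    """Hypothetical helper: a 2-D object array whose [row, col] entry is an
    (x, y) plaquette index, so iterating the array yields rows and iterating
    its transpose (.T) yields columns. The layout convention is illustrative."""
    indices = np.empty((rows, cols), dtype=object)
    for r in range(rows):
        for c in range(cols):
            indices[r, c] = (c, rows - 1 - r)  # arbitrary (x, y) convention
    return indices
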
Example #7
    def _graphs(cls, code, time_steps, syndrome, error_probability=None, measurement_error_probability=None, eta=None):
        """Graphs of plaquette nodes and weighted edges consistent with the syndrome.

        Notes:

        * In the case of infinite bias, separate graphs are returned for each row and each column.
        * Nodes are added for all syndrome plaquettes in both "by row" and "by column" passes.
        * Edges are added between nodes on a given row or column; such edges are weighted by the distance function.

        :param code: Rotated toric code.
        :type code: RotatedToricCode
        :param time_steps: Number of time steps.
        :type time_steps: int
        :param syndrome: Syndrome as binary array with (t, x, y) dimensions.
        :type syndrome: numpy.array (2d)
        :param error_probability: Error probability
            (optional if equal to measurement_error_probability and eta is None).
        :type error_probability: float or None
        :param measurement_error_probability: Measurement error probability
            (optional if equal to error_probability and eta is None).
        :type measurement_error_probability: float or None
        :param eta: Bias (a positive finite number or None for infinite bias), i.e. p_y / (p_x + p_z).
        :type eta: float or None
        :return: Graphs of weighted edges between plaquette nodes, consistent with the syndrome, each as
            {(((a_t, a_x, a_y), a_is_row), ((b_t, b_x, b_y), b_is_row)): weight, ...}.
        :rtype: generator of dict of (((int, int, int), bool), ((int, int, int), bool)) edges to float weights.
        """
        # list of lattice nodes (or list of list of line nodes in case of infinite bias)
        lattice_nodes = []
        # get syndrome indices, as list of set where syndrome_indices[t] corresponds to time t
        syndrome_indices = [code.syndrome_to_plaquette_indices(s) for s in syndrome]
        # all plaquettes as (x, y)
        plaquette_indices = cls._plaquette_indices(code)

        def _add_edge(graph, a_node, b_node):
            # unpack nodes
            ((a_t, a_x, a_y), a_is_row), ((b_t, b_x, b_y), b_is_row) = a_node, b_node
            # do not add edge between orthogonals
            if a_is_row != b_is_row:
                return
            # do not add edge between time steps if measurement probability is 0 or 1
            if measurement_error_probability in (0, 1) and a_t != b_t:
                return
            # do not add edge between space steps if error_probability is 0
            if error_probability == 0 and (a_x, a_y) != (b_x, b_y):
                return
            # do not add edge between distinct parallels if eta is None
            if eta is None and ((a_is_row and a_y != b_y) or (not a_is_row and a_x != b_x)):
                return
            # add edge to graph
            graph.add_edge(a_node, b_node, cls._distance(code, time_steps, a_node, b_node, error_probability,
                                                         measurement_error_probability, eta))

        # iterate by rows then by columns
        for by_row in (True, False):
            # loop through lines (rows if by_row, cols if not by_row)
            for line in plaquette_indices if by_row else plaquette_indices.T:
                # line nodes
                line_nodes = []
                # loop through indices on line
                for (x, y) in line:
                    # loop through time
                    for t in range(time_steps):
                        if (x, y) in syndrome_indices[t]:  # if index in syndrome
                            # add node to line nodes
                            node = ((t, x, y), by_row)
                            line_nodes.append(node)
                if line_nodes:  # if any line nodes
                    if eta is None:  # if infinite bias
                        # yield graph for line nodes
                        graph = gt.SimpleGraph()
                        for a_node, b_node in itertools.combinations(line_nodes, 2):
                            _add_edge(graph, a_node, b_node)
                        yield graph
                    else:  # else finite bias
                        # add line nodes to lattice nodes
                        lattice_nodes.extend(line_nodes)

        if lattice_nodes:  # if any lattice nodes
            if eta is not None:  # if finite bias
                # yield graph for lattice nodes
                graph = gt.SimpleGraph()
                for a_node, b_node in itertools.combinations(lattice_nodes, 2):
                    _add_edge(graph, a_node, b_node)
                yield graph
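
Because _graphs is a generator (one graph per row or column under infinite bias, a single lattice-wide graph otherwise), a natural way to consume it is to match each yielded graph independently and pool the results. The sketch below is illustrative only; it assumes gt is qecsim's graph tools module, as in the examples above, and is not the decoder's actual post-processing:

from qecsim import graphtools as gt  # assumed import matching the gt alias used above


def match_all_graphs_sketch(graphs):
    """Illustrative only: run minimum-weight perfect matching on each graph
    yielded by a _graphs-style generator and pool the matched node pairs."""
    mates = set()
    for graph in graphs:
        mates.update(gt.mwpm(graph))
    return mates
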
Example #8
        def mwpm(self,
                 matched_indices,
                 syndrome_indices,
                 factor=3,
                 initial=1,
                 box_shape='t',
                 distance_algorithm=4):
            """
            Minimum-weight perfect matching of syndrome indices over a background of matched dual syndrome indices.

            Notes:

            * The background is set according to :meth:`set_background`.
            * A graph of the unmatched foreground indices is created, with appropriate virtual indices, and with edge
              weights given by :meth:`distance`.
            * A standard minimum-weight perfect matching is found in the graph.

            :param matched_indices: Matched pairs of background syndrome indices (dual to foreground).
            :type matched_indices: frozenset of 2-tuples of 2-tuple of int
            :param syndrome_indices: Unmatched foreground syndrome indices.
            :type syndrome_indices: frozenset of 2-tuple of int
            :param factor: Multiplication factor. (default=3)
            :type factor: int or float
            :param initial: Initial edge weight. (default=1)
            :type initial: int or float
            :param box_shape: Shape of background boxes. (default='t', 't'=tight, 'r'=rounded, 'f'=fitted, 'l'=loose)
            :type box_shape: str
            :param distance_algorithm: Distance algorithm. (default=4, 1=v+h, 2=min(v+h,h+v), 4=min(v+h,h+v,v+h+v,h+v+h))
            :type distance_algorithm: int
            :return: Minimum-weight perfect matching of foreground syndrome indices.
            :rtype: frozenset of 2-tuples of 2-tuple of int
            """
            # set grid background
            self.set_background(matched_indices,
                                factor=factor,
                                initial=initial,
                                box_shape=box_shape)
            # prepare graph
            graph = gt.SimpleGraph()
            # create lists of nodes and corresponding vnodes
            # NOTE: encapsulate indices in node objects that implement object reference equality since we may pass
            # multiple virtual plaquettes with the same index for matching.
            nodes, vnodes = [], []
            for index in syndrome_indices:
                nodes.append(self._Node(index))
                vnodes.append(
                    self._Node(self._code.virtual_plaquette_index(index)))
            # add weighted edges to graph
            for a_node, b_node in itertools.chain(
                    itertools.combinations(nodes, 2),  # all nodes to all nodes
                    itertools.combinations(vnodes,
                                           2),  # all vnodes to all vnodes
                    zip(nodes, vnodes)):  # each node to corresponding vnode
                # find weighted taxi-cab distance between a and b
                distance = self.distance(a_node.index,
                                         b_node.index,
                                         algorithm=distance_algorithm)
                # add edge with weight=distance
                graph.add_edge(a_node, b_node, distance)
            # find MWPM edges {(a, b), (c, d), ...}
            mates = gt.mwpm(graph)
            # convert to frozenset of sorted tuples {(a_index, b_index), ...}, removing matches if both indices virtual
            matches = frozenset(
                tuple(sorted((a.index, b.index))) for a, b in mates
                if self._code.is_in_bounds(a.index)
                or self._code.is_in_bounds(b.index))
            return matches
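
The NOTE above explains why indices are wrapped in node objects: several virtual plaquettes may share the same index, so nodes must compare by object reference rather than by value. The class below is a hypothetical stand-in consistent with that note, not the actual _Node:

class NodeSketch:
    """Hypothetical index wrapper relying on default object identity for
    equality and hashing, so two nodes wrapping the same virtual plaquette
    index remain distinct vertices in the matching graph."""

    def __init__(self, index):
        self.index = index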