def test_bad_syndrome_raises_value_error():
    """decode() must raise ValueError when given a non-syndrome argument.

    A string is not a valid syndrome, so decode should fail before
    returning anything.
    """
    g = nx.Graph()
    g.add_edge(0, 1, qubit_id=0)
    g.add_edge(1, 2, qubit_id=1)
    m = Matching(g)
    with pytest.raises(ValueError):
        # Fixed: the original assigned the result to an unused local
        # (`noise = ...`); decode() is expected to raise, so no value
        # should be captured.
        m.decode('test')
def test_boundaries_from_networkx():
    """Boundary nodes flagged with ``is_boundary`` are picked up by Matching,
    and decoding matches defects to the nearest boundary as expected.

    The graph is a 6-node ring (nodes 0-5) whose two boundary nodes, 0 and
    5, are connected by a weight-0 non-qubit edge.
    """
    g = nx.Graph()
    g.add_edge(0, 1, qubit_id=0)
    g.add_edge(1, 2, qubit_id=1)
    g.add_edge(2, 3, qubit_id=2)
    g.add_edge(3, 4, qubit_id=3)
    g.add_edge(4, 5, qubit_id=4)
    g.add_edge(0, 5, qubit_id=-1, weight=0.0)
    g.nodes()[0]['is_boundary'] = True
    g.nodes()[5]['is_boundary'] = True
    m = Matching(g)
    assert m.boundary == [0, 5]
    # Fixed: every syndrome must have one entry per node (6 entries).
    # Three of the original syndromes had only 5 entries (a trailing 0 was
    # dropped), which decode() rejects with ValueError.
    assert np.array_equal(m.decode(np.array([0, 1, 0, 0, 0, 0])),
                          np.array([1, 0, 0, 0, 0]))
    assert np.array_equal(m.decode(np.array([0, 0, 1, 0, 0, 0])),
                          np.array([1, 1, 0, 0, 0]))
    assert np.array_equal(m.decode(np.array([0, 0, 1, 1, 0, 0])),
                          np.array([0, 0, 1, 0, 0]))
    assert np.array_equal(m.decode(np.array([0, 0, 0, 1, 0, 0])),
                          np.array([0, 0, 0, 1, 1]))
def test_local_matching_connected(cluster_size):
    """Local matching succeeds on two unit-weight chains joined by one
    heavy (weight ``2 * cluster_size``) edge, with every node a defect."""
    graph = nx.Graph()
    fault_id = 0
    # First chain: nodes 0 .. cluster_size.
    for node in range(cluster_size):
        graph.add_edge(node, node + 1, weight=1.0, qubit_id=fault_id)
        fault_id += 1
    # Bridge between the two chains.
    graph.add_edge(cluster_size, cluster_size + 1,
                   weight=2 * cluster_size, qubit_id=fault_id)
    fault_id += 1
    # Second chain: nodes cluster_size+1 .. 2*cluster_size+1.
    for node in range(cluster_size + 1, 2 * cluster_size + 1):
        graph.add_edge(node, node + 1, weight=1.0, qubit_id=fault_id)
        fault_id += 1
    matching = Matching(graph)
    all_defects = [1] * (2 * (cluster_size + 1))
    matching.decode(all_defects, num_neighbours=cluster_size)
def test_decode_all_neighbours():
    """``num_neighbours=None`` (full matching) decodes a two-edge path."""
    graph = nx.Graph()
    graph.add_edge(0, 1, qubit_id=0)
    graph.add_edge(1, 2, qubit_id=1)
    matching = Matching(graph)
    correction = matching.decode([1, 0, 1], num_neighbours=None)
    assert np.array_equal(correction, np.array([1, 1]))
def test_unweighted_stabiliser_graph_from_networkx():
    """A weighted networkx graph yields the expected qubit/stabiliser
    counts, shortest paths, distances, decodings and input validation."""
    # (node_u, node_v, qubit_id, weight); qubit_id == -1 marks a non-qubit edge.
    edges = [
        (0, 1, 0, 7.0),
        (0, 5, 1, 14.0),
        (0, 2, 2, 9.0),
        (1, 2, -1, 10.0),
        (1, 3, 3, 15.0),
        (2, 5, 4, 2.0),
        (2, 3, -1, 11.0),
        (3, 4, 5, 6.0),
        (4, 5, 6, 9.0),
    ]
    w = nx.Graph()
    for node_u, node_v, qubit, weight in edges:
        w.add_edge(node_u, node_v, qubit_id=qubit, weight=weight)
    m = Matching(w)
    assert m.num_qubits == 7
    assert m.num_stabilisers == 6
    assert m.stabiliser_graph.shortest_path(3, 5) == [3, 2, 5]
    assert m.stabiliser_graph.distance(5, 0) == pytest.approx(11.0)
    assert np.array_equal(m.decode(np.array([1, 0, 1, 0, 0, 0])),
                          np.array([0, 0, 1, 0, 0, 0, 0]))
    # Invalid syndromes (wrong length, or otherwise undecodable) raise.
    with pytest.raises(ValueError):
        m.decode(np.array([1, 1, 0]))
    with pytest.raises(ValueError):
        m.decode(np.array([1, 1, 1, 0, 0, 0]))
    assert np.array_equal(m.decode(np.array([1, 0, 0, 0, 0, 1])),
                          np.array([0, 0, 1, 0, 1, 0, 0]))
    assert np.array_equal(m.decode(np.array([0, 1, 0, 0, 0, 1])),
                          np.array([0, 0, 0, 0, 1, 0, 0]))
def test_isolated_negative_weight(nn):
    """Decoding a 4-node ring flips only the single negative-weight edge,
    and the returned total weight equals that edge's weight."""
    m = Matching()
    ring_edges = [(0, 1, 0, 1), (1, 2, 1, -10), (2, 3, 2, 1), (3, 0, 3, 1)]
    for node_u, node_v, fault_id, weight in ring_edges:
        m.add_edge(node_u, node_v, fault_id, weight)
    correction, total_weight = m.decode(
        [0, 1, 1, 0], return_weight=True, num_neighbours=nn)
    assert np.array_equal(correction, np.array([0, 1, 0, 0]))
    assert total_weight == -10
def test_double_weight_matching():
    """With these float weights the minimum-weight matching pairs the
    defects horizontally (qubits 2 and 3), not vertically."""
    w = nx.Graph()
    weighted_edges = [
        (0, 1, 0, 0.97),
        (2, 3, 1, 1.98),
        (0, 2, 2, 1.1),
        (1, 3, 3, 1.2),
    ]
    for node_u, node_v, qubit, weight in weighted_edges:
        w.add_edge(node_u, node_v, qubit_id=qubit, weight=weight)
    m = Matching(w)
    assert list(m.decode(np.array([1, 1, 1, 1]))) == [0, 0, 1, 1]
def test_mwpm_noisy_decode(n, z_err, c_expected):
    """Decoding repeated, noisy syndrome measurements gives ``c_expected``.

    ``n`` holds per-round error vectors, ``z_err`` the measurement errors;
    the syndrome is converted to differences between consecutive rounds
    before decoding.
    """
    fn = "css_2D-toric_(4,4)_[[18,2,3]]_Hx.npz"
    H = load_npz(os.path.join(TEST_DIR, 'data', fn))
    matching = Matching(H, repetitions=z_err.shape[1])
    cumulative_noise = np.cumsum(n, 0) % 2
    noiseless_syndrome = H.dot(cumulative_noise.T) % 2
    noisy_syndrome = (noiseless_syndrome + z_err) % 2
    # Difference syndrome: each round records only changes from the last.
    noisy_syndrome[:, 1:] = (noisy_syndrome[:, 1:] - noisy_syndrome[:, :-1]) % 2
    correction = matching.decode(noisy_syndrome)
    assert np.array_equal(correction, c_expected)
def test_mwpm_decode_method():
    """A weight-2 error on the toric code is recovered exactly."""
    fn = "css_2D-toric_(4,4)_[[18,2,3]]_Hx.npz"
    H = load_npz(os.path.join(TEST_DIR, 'data', fn))
    matching = Matching(H)
    noise = np.zeros(H.shape[1], dtype=int)
    noise[[5, 10]] = 1
    syndrome = H.dot(noise) % 2
    correction = matching.decode(syndrome)
    assert np.array_equal(correction, noise)
def test_negative_and_positive_in_matching(nn):
    """A matching mixing positive and negative weights returns the summed
    weight of the flipped faults (-10 + 1 = -9)."""
    graph = nx.Graph()
    weights = [1, -10, 1, 1]
    for fault_id, (node_u, node_v) in enumerate([(0, 1), (1, 2), (2, 3), (3, 0)]):
        graph.add_edge(node_u, node_v, fault_ids=fault_id,
                       weight=weights[fault_id])
    matching = Matching(graph)
    correction, total_weight = matching.decode(
        [0, 1, 0, 1], return_weight=True, num_neighbours=nn)
    assert np.array_equal(correction, np.array([0, 1, 1, 0]))
    assert total_weight == -9
def test_matching_correct():
    """On a 4x4 grid-like weighted graph, local (num_neighbours=20) and
    full (num_neighbours=None) matching agree, both for the trivial
    syndrome and for a specific 6-defect syndrome."""
    # (node_u, node_v, weight); qubit_id is the list index.
    edges = [
        (0, 1, 1.24), (1, 2, 1.31), (2, 3, 1.41), (0, 4, 1.51),
        (1, 5, 1.65), (2, 6, 1.15), (3, 7, 1.44), (4, 5, 1.70),
        (5, 6, 1.9), (6, 7, 1.12), (4, 8, 1.87), (5, 9, 1.91),
        (6, 10, 1.09), (7, 11, 1.21), (8, 9, 1.99), (9, 10, 1.01),
        (10, 11, 1.06), (8, 12, 1.16), (9, 13, 1.38), (10, 14, 1.66),
        (11, 15, 1.58), (12, 13, 1.12), (13, 14, 1.50), (14, 15, 1.00),
    ]
    g = nx.Graph()
    for qubit, (node_u, node_v, weight) in enumerate(edges):
        g.add_edge(node_u, node_v, weight=weight, qubit_id=qubit)
    m = Matching(g)
    # No defects -> no correction, in both decoding modes.
    assert sum(m.decode([0] * 16, num_neighbours=20)) == 0
    assert sum(m.decode([0] * 16, num_neighbours=None)) == 0
    syndrome = np.zeros(16, dtype=np.uint8)
    syndrome[[0, 5, 6, 11, 14, 15]] = 1
    expected_qubits = np.array([0, 4, 12, 16, 23])
    assert np.array_equal(
        m.decode(syndrome, num_neighbours=20).nonzero()[0], expected_qubits)
    assert np.array_equal(
        m.decode(syndrome, num_neighbours=None).nonzero()[0], expected_qubits)
def test_negative_weight_repetition_code(nn):
    """On a 6-node ring where every edge has weight -1, the decoder flips
    all edges except the one inside the defect pair, giving weight -5."""
    m = Matching()
    num_nodes = 6
    for i in range(num_nodes):
        # Ring edge i connects node i to node (i+1) mod 6; fault id is i.
        m.add_edge(i, (i + 1) % num_nodes, i, -1)
    correction, total_weight = m.decode(
        [0, 1, 1, 0, 0, 0], return_weight=True, num_neighbours=nn)
    assert np.array_equal(correction, np.array([1, 0, 1, 1, 1, 1]))
    assert total_weight == -5
def test_matching_weight(n, num_neighbours):
    """The weight returned by decode(return_weight=True) equals the sum of
    the spacelike weights of the faults it flipped."""
    error_probability = 0.4
    H = repetition_code(n)
    noise = np.random.rand(n) < error_probability
    weights = np.random.rand(n)
    syndrome = H @ noise % 2
    matching = Matching(H, spacelike_weights=weights)
    correction, weight = matching.decode(
        syndrome, num_neighbours=num_neighbours, return_weight=True)
    assert np.sum(weights[correction == 1]) == pytest.approx(weight)
def test_odd_3d_syndrome_raises_value_error():
    """A 2D syndrome array with an odd defect layout must be rejected."""
    parity_check = csr_matrix(np.array([[1, 1, 0], [0, 1, 1]]))
    matching = Matching(parity_check)
    bad_syndrome = np.array([[1, 0], [0, 0]])
    with pytest.raises(ValueError):
        matching.decode(bad_syndrome)
def test_3d_syndrome_raises_value_error_when_repetitions_not_set():
    """Passing a 2D syndrome to a Matching built without ``repetitions``
    must raise ValueError."""
    parity_check = csr_matrix(np.array([[1, 1, 0], [0, 1, 1]]))
    matching = Matching(parity_check)
    two_round_syndrome = np.array([[1, 0], [0, 0]])
    with pytest.raises(ValueError):
        matching.decode(two_round_syndrome)