def test_simple(self):
    """Simple test of averaging. Standard error of mean is generated."""
    nominals = [[1, 2], [3, 4], [5, 6]]
    datum = unp.uarray(nominals, np.full((3, 2), np.nan))

    # Average over the inner (column) axis: one mean per row.
    processed_data = AverageData(axis=1)(data=datum)
    np.testing.assert_array_almost_equal(
        unp.nominal_values(processed_data),
        np.array([1.5, 3.5, 5.5]),
    )
    np.testing.assert_array_almost_equal(
        unp.std_devs(processed_data),
        np.array([0.5, 0.5, 0.5]) / np.sqrt(2),
    )

    # Average over the outer (row) axis: one mean per column.
    processed_data = AverageData(axis=0)(data=datum)
    np.testing.assert_array_almost_equal(
        unp.nominal_values(processed_data),
        np.array([3.0, 4.0]),
    )
    np.testing.assert_array_almost_equal(
        unp.std_devs(processed_data),
        np.array([1.632993161855452, 1.632993161855452]) / np.sqrt(3),
    )
def get_processor(
    meas_level: MeasLevel = MeasLevel.CLASSIFIED,
    meas_return: str = "avg",
    normalize: bool = True,
) -> DataProcessor:
    """Get a DataProcessor that produces a continuous signal given the options.

    Args:
        meas_level: The measurement level of the data to process.
        meas_return: The measurement return (single or avg) of the data to process.
        normalize: Add a data normalization node to the Kerneled data processor.

    Returns:
        An instance of DataProcessor capable of dealing with the given options.

    Raises:
        DataProcessorError: if the measurement level is not supported.
    """
    if meas_level == MeasLevel.CLASSIFIED:
        return DataProcessor("counts", [Probability("1")])

    if meas_level == MeasLevel.KERNELED:
        # Single-shot data carries an extra shot axis that is averaged out first.
        if meas_return == "single":
            nodes = [AverageData(axis=1), SVD()]
        else:
            nodes = [SVD()]
        if normalize:
            nodes.append(MinMaxNormalize())
        return DataProcessor("memory", nodes)

    raise DataProcessorError(f"Unsupported measurement level {meas_level}.")
def test_averaging_and_svd(self):
    """Test averaging followed by a SVD."""
    processor = DataProcessor("memory", [AverageData(axis=1), SVD()])

    # The SVD node must be trained on the calibration points first.
    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    # (datum index, expected signal, expected error or None to skip the check)
    x45_err = np.array([np.std([1, 1, 1, -1]) / np.sqrt(4.0) / 2] * 2)
    cases = [
        (0, self._sig_es, None),   # excited state
        (1, self._sig_gs, None),   # ground state
        (2, self._sig_x90, np.array([0.25, 0.25])),  # x90p rotation
        (3, self._sig_x45, x45_err),                 # x45p rotation
    ]
    for idx, expected_sig, expected_err in cases:
        processed, error = processor(self.data.data(idx))
        self.assertTrue(np.allclose(processed, expected_sig))
        if expected_err is not None:
            self.assertTrue(np.allclose(error, expected_err))
def test_averaging(self):
    """Test that averaging of the datums produces the expected IQ points."""
    processor = DataProcessor("memory", [AverageData(axis=1)])

    # Both states share the same shot-noise standard error.
    expected_std = np.array(
        [[0.15811388300841894, 0.1], [0.15811388300841894, 0.0]]
    ) / 2.0

    for idx, expected_avg in [
        (0, np.array([[1.0, 1.0], [-1.0, 1.0]])),    # excited state
        (1, np.array([[-1.0, -1.0], [1.0, -1.0]])),  # ground state
    ]:
        processed = processor(self.data.data(idx))
        np.testing.assert_array_almost_equal(
            unp.nominal_values(processed), expected_avg
        )
        np.testing.assert_array_almost_equal(
            unp.std_devs(processed), expected_std
        )
def test_process_all_data(self):
    """Test that we can process all data at once."""
    processor = DataProcessor("memory", [AverageData(axis=1), SVD()])

    # Train the SVD node on the two calibration points.
    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    expected_signals = [self._sig_es, self._sig_gs, self._sig_x90, self._sig_x45]
    all_expected = np.vstack([sig.reshape(1, 2) for sig in expected_signals])

    # All experiment data processed in a single call.
    processed = processor(self.data.data())
    np.testing.assert_array_almost_equal(
        unp.nominal_values(processed), all_expected
    )

    # Each datum processed on its own yields the matching row.
    for idx, expected in enumerate(expected_signals):
        processed = processor(self.data.data(idx))
        np.testing.assert_array_almost_equal(
            unp.nominal_values(processed), expected
        )
def test_iq_averaging(self):
    """Test averaging of IQ-data."""
    # Ten single shots, each with two memory slots of (I, Q) pairs.
    shots = [
        [[-6.20601501e14, -1.33257051e15], [-1.70921324e15, -4.05881657e15]],
        [[-5.80546502e14, -1.33492509e15], [-1.65094637e15, -4.05926942e15]],
        [[-4.04649069e14, -1.33191056e15], [-1.29680377e15, -4.03604815e15]],
        [[-2.22203874e14, -1.30291309e15], [-8.57663429e14, -3.97784973e15]],
        [[-2.92074029e13, -1.28578530e15], [-9.78824053e13, -3.92071056e15]],
        [[1.98056981e14, -1.26883024e15], [3.77157017e14, -3.87460328e15]],
        [[4.29955888e14, -1.25022995e15], [1.02340118e15, -3.79508679e15]],
        [[6.38981344e14, -1.25084614e15], [1.68918514e15, -3.78961044e15]],
        [[7.09988897e14, -1.21906634e15], [1.91914171e15, -3.73670664e15]],
        [[7.63169115e14, -1.20797552e15], [2.03772603e15, -3.74653863e15]],
    ]
    self.create_experiment(shots, single_shot=True)

    # Average over the shot axis.
    node = AverageData(axis=0)
    avg_datum, error = node(self.iq_experiment.data(0)["memory"])

    expected_avg = np.array(
        [[8.82943876e13, -1.27850527e15], [1.43410186e14, -3.89952402e15]]
    )
    # Standard deviation over the ten shots, divided by sqrt(#shots).
    expected_std = np.array(
        [[5.07650185e14, 4.44664719e13], [1.40522641e15, 1.22326831e14]]
    ) / np.sqrt(10)

    self.assertTrue(np.allclose(avg_datum, expected_avg))
    self.assertTrue(np.allclose(error, expected_std))
def test_distorted_iq_data(self):
    """Test if uncertainty can consider correlation.

    SVD projects IQ data onto I-axis, and input different data sets that
    have the same mean and same variance but squeezed along different axis.
    """
    # Pre-trained SVD that projects onto the I axis with unit scale and no offset.
    projector = SVD()
    projector._scales = [1.0]
    projector._main_axes = [np.array([1, 0])]
    projector._means = [(0.0, 0.0)]

    processor = DataProcessor("memory", [AverageData(axis=1), projector])

    # Five points spread along the I axis...
    spread_i = {
        "memory": [[[-1, 0]], [[-0.5, 0]], [[0.0, 0]], [[0.5, 0]], [[1, 0]]]
    }
    # ...and the same spread along the Q axis.
    spread_q = {
        "memory": [[[0, -1]], [[0, -0.5]], [[0, 0.0]], [[0, 0.5]], [[0, 1]]]
    }

    result_i = processor(spread_i)
    self.assertAlmostEqual(result_i[0].nominal_value, 0.0)
    self.assertAlmostEqual(result_i[0].std_dev, 0.31622776601683794)

    result_q = processor(spread_q)
    self.assertAlmostEqual(result_q[0].nominal_value, 0.0)
    self.assertAlmostEqual(result_q[0].std_dev, 0.0)
def test_simple(self):
    """Simple test of averaging."""
    datum = np.array([[1, 2], [3, 4], [5, 6]])

    # Average over columns: one mean per row, SEM over two samples.
    node = AverageData(axis=1)
    avg, sem = node(datum)
    self.assertTrue(np.allclose(avg, np.array([1.5, 3.5, 5.5])))
    self.assertTrue(np.allclose(sem, np.array([0.5, 0.5, 0.5]) / np.sqrt(2)))

    # Average over rows: one mean per column, SEM over three samples.
    node = AverageData(axis=0)
    avg, sem = node(datum)
    self.assertTrue(np.allclose(avg, np.array([3.0, 4.0])))
    col_std = np.std([1, 3, 5])
    self.assertTrue(np.allclose(sem, np.array([col_std, col_std]) / np.sqrt(3)))
def test_iq_averaging(self):
    """Test averaging of IQ-data."""
    # IQ data for a single quantum circuit: 10 shots, 2 slots, (I, Q) pairs.
    shots = [
        [[-6.20601501e14, -1.33257051e15], [-1.70921324e15, -4.05881657e15]],
        [[-5.80546502e14, -1.33492509e15], [-1.65094637e15, -4.05926942e15]],
        [[-4.04649069e14, -1.33191056e15], [-1.29680377e15, -4.03604815e15]],
        [[-2.22203874e14, -1.30291309e15], [-8.57663429e14, -3.97784973e15]],
        [[-2.92074029e13, -1.28578530e15], [-9.78824053e13, -3.92071056e15]],
        [[1.98056981e14, -1.26883024e15], [3.77157017e14, -3.87460328e15]],
        [[4.29955888e14, -1.25022995e15], [1.02340118e15, -3.79508679e15]],
        [[6.38981344e14, -1.25084614e15], [1.68918514e15, -3.78961044e15]],
        [[7.09988897e14, -1.21906634e15], [1.91914171e15, -3.73670664e15]],
        [[7.63169115e14, -1.20797552e15], [2.03772603e15, -3.74653863e15]],
    ]
    iq_data = np.array([shots], dtype=float)
    # NaN standard deviations mean "no error supplied".
    self.create_experiment_data(
        unp.uarray(iq_data, np.full_like(iq_data, np.nan)), single_shot=True
    )

    # Average over the shot axis.
    node = AverageData(axis=0)
    processed_data = node(data=np.asarray(self.iq_experiment.data(0)["memory"]))

    expected_avg = np.array(
        [[8.82943876e13, -1.27850527e15], [1.43410186e14, -3.89952402e15]]
    )
    expected_std = np.array(
        [[5.07650185e14, 4.44664719e13], [1.40522641e15, 1.22326831e14]]
    ) / np.sqrt(10)

    # decimal=-8 keeps the comparison loose: values are of order 1e14.
    np.testing.assert_array_almost_equal(
        unp.nominal_values(processed_data), expected_avg, decimal=-8
    )
    np.testing.assert_array_almost_equal(
        unp.std_devs(processed_data), expected_std, decimal=-8
    )
def test_with_error(self):
    """Compute error propagation. This is quadratic sum divided by samples."""
    values = [[1, 2, 3, 4, 5, 6]]
    errors = [[0.1, 0.2, 0.3, 0.4, 0.5, 0.6]]
    node = AverageData(axis=1)
    processed_data = node(data=unp.uarray(values, errors))

    self.assertAlmostEqual(processed_data[0].nominal_value, 3.5)
    # sqrt(0.1**2 + 0.2**2 + ... + 0.6**2) / 6
    self.assertAlmostEqual(processed_data[0].std_dev, 0.15898986690282427)
def test_normalize(self):
    """Test that by adding a normalization node we get a signal between 0 and 1."""
    processor = DataProcessor(
        "memory", [AverageData(axis=1), SVD(), MinMaxNormalize()]
    )

    # Train the SVD node on the two calibration points.
    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    expected = np.array([[0.0, 1.0], [1.0, 0.0], [0.5, 0.5], [0.75, 0.25]])

    # Process every datum in a single call.
    processed = processor(self.data.data())
    np.testing.assert_array_almost_equal(unp.nominal_values(processed), expected)
def test_normalize(self):
    """Test that by adding a normalization node we get a signal between 0 and 1."""
    processor = DataProcessor(
        "memory", [AverageData(axis=1), SVD(), MinMaxNormalize()]
    )

    # Train the SVD node on the two calibration points.
    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    expected = np.array([[0.0, 1.0, 0.5, 0.75], [1.0, 0.0, 0.5, 0.25]])

    # Process every datum in one call; index [0] selects the nominal values.
    self.assertTrue(np.allclose(processor(self.data.data())[0], expected))
def test_averaging_and_svd(self):
    """Test averaging followed by a SVD."""
    processor = DataProcessor("memory", [AverageData(axis=1), SVD()])

    # The SVD node must be trained on the calibration points first.
    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    # (datum index, expected signal, expected std or None to skip the check)
    x45_std = np.std([1, 1, 1, -1]) / np.sqrt(4.0) / 2
    cases = [
        (0, self._sig_es, None),   # excited state
        (1, self._sig_gs, None),   # ground state
        (2, self._sig_x90, np.array([0.25, 0.25])),      # x90p rotation
        (3, self._sig_x45, np.array([x45_std, x45_std])),  # x45p rotation
    ]
    for idx, expected_sig, expected_std in cases:
        processed = processor(self.data.data(idx))
        np.testing.assert_array_almost_equal(
            unp.nominal_values(processed), expected_sig
        )
        if expected_std is not None:
            np.testing.assert_array_almost_equal(
                unp.std_devs(processed), expected_std
            )
def test_averaging(self):
    """Test that averaging of the datums produces the expected IQ points."""
    processor = DataProcessor("memory", [AverageData(axis=1)])

    # Both states share the same shot-noise standard error.
    expected_std = np.array(
        [[0.15811388300841894, 0.1], [0.15811388300841894, 0.0]]
    ) / 2.0

    for idx, expected_avg in [
        (0, np.array([[1.0, 1.0], [-1.0, 1.0]])),    # excited state
        (1, np.array([[-1.0, -1.0], [1.0, -1.0]])),  # ground state
    ]:
        processed, error = processor(self.data.data(idx))
        self.assertTrue(np.allclose(processed, expected_avg))
        self.assertTrue(np.allclose(error, expected_std))
def test_with_error_partly_non_error(self):
    """Compute error propagation. Some elements have no error."""
    # Two identical rows of samples; the second row has a NaN (missing) error
    # on its first element.
    datum = unp.uarray(
        [
            [1, 2, 3, 4, 5, 6],
            [1, 2, 3, 4, 5, 6],
        ],
        [
            [0.1, 0.2, 0.3, 0.4, 0.5, 0.6],
            [np.nan, 0.2, 0.3, 0.4, 0.5, 0.6],
        ],
    )
    node = AverageData(axis=1)
    processed_data = node(data=datum)
    self.assertAlmostEqual(processed_data[0].nominal_value, 3.5)
    # Row with full errors propagates them: sqrt(0.1**2 + 0.2**2 + ... + 0.6**2) / 6
    self.assertAlmostEqual(processed_data[0].std_dev, 0.15898986690282427)
    self.assertAlmostEqual(processed_data[1].nominal_value, 3.5)
    # Row with a NaN error: the asserted value equals the standard error of the
    # mean of the samples themselves, np.std([1, 2, 3, 4, 5, 6]) / sqrt(6)
    # = 1.7078251... / 2.4494897... = 0.6972166887783964.
    self.assertAlmostEqual(processed_data[1].std_dev, 0.6972166887783964)
def test_json_multi_node(self):
    """Check if the data processor with multiple nodes is serializable."""
    nodes = [MinMaxNormalize(), AverageData(axis=2)]
    processor = DataProcessor("counts", nodes)
    self.assertRoundTripSerializable(processor, check_func=self.json_equiv)
def test_json(self):
    """Check if the node is serializable."""
    self.assertRoundTripSerializable(AverageData(axis=3), check_func=self.json_equiv)