def test_simple(self): """Simple test of normalization node.""" data = np.array([1.0, 2.0, 3.0, 3.0]) error = np.array([0.1, 0.2, 0.3, 0.3]) expected_data = np.array([0.0, 0.5, 1.0, 1.0]) expected_error = np.array([0.05, 0.1, 0.15, 0.15]) node = MinMaxNormalize() processed_data = node(data=data) np.testing.assert_array_almost_equal( unp.nominal_values(processed_data), expected_data, ) processed_data = node(data=unp.uarray(nominal_values=data, std_devs=error)) np.testing.assert_array_almost_equal( unp.nominal_values(processed_data), expected_data, ) np.testing.assert_array_almost_equal( unp.std_devs(processed_data), expected_error, )
def get_processor(
    meas_level: MeasLevel = MeasLevel.CLASSIFIED,
    meas_return: str = "avg",
    normalize: bool = True,
) -> DataProcessor:
    """Get a DataProcessor that produces a continuous signal given the options.

    Args:
        meas_level: The measurement level of the data to process.
        meas_return: The measurement return (single or avg) of the data to process.
        normalize: Add a data normalization node to the Kerneled data processor.

    Returns:
        An instance of DataProcessor capable of dealing with the given options.

    Raises:
        DataProcessorError: if the measurement level is not supported.
    """
    if meas_level == MeasLevel.CLASSIFIED:
        return DataProcessor("counts", [Probability("1")])

    if meas_level == MeasLevel.KERNELED:
        if meas_return == "single":
            processor = DataProcessor("memory", [AverageData(axis=1), SVD()])
        else:
            processor = DataProcessor("memory", [SVD()])

        if normalize:
            processor.append(MinMaxNormalize())

        return processor

    raise DataProcessorError(f"Unsupported measurement level {meas_level}.")
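# A usage sketch for get_processor; the processors named in the comments
# follow directly from the branches of the function above.
counts_processor = get_processor(MeasLevel.CLASSIFIED)
# -> DataProcessor("counts", [Probability("1")])

single_shot_processor = get_processor(MeasLevel.KERNELED, meas_return="single")
# -> DataProcessor("memory", [AverageData(axis=1), SVD(), MinMaxNormalize()])

avg_processor = get_processor(MeasLevel.KERNELED, normalize=False)
# -> DataProcessor("memory", [SVD()]) with no normalization node appended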
def test_normalize(self):
    """Test that by adding a normalization node we get a signal between 0 and 1."""
    processor = DataProcessor("memory", [SVD(), MinMaxNormalize()])

    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    all_expected = np.array([[0.0, 1.0, 0.5, 0.75], [1.0, 0.0, 0.5, 0.25]])

    # Test processing of all data
    processed = processor(self.data.data())[0]
    self.assertTrue(np.allclose(processed, all_expected))
def test_simple(self):
    """Simple test of normalization node."""
    data = np.array([1.0, 2.0, 3.0, 3.0])
    error = np.array([0.1, 0.2, 0.3, 0.3])
    expected_data = np.array([0.0, 0.5, 1.0, 1.0])
    expected_error = np.array([0.05, 0.1, 0.15, 0.15])

    node = MinMaxNormalize()

    self.assertTrue(np.allclose(node(data)[0], expected_data))
    self.assertTrue(np.allclose(node(data, error)[0], expected_data))
    self.assertTrue(np.allclose(node(data, error)[1], expected_error))
def test_normalize(self):
    """Test that by adding a normalization node we get a signal between 0 and 1."""
    processor = DataProcessor("memory", [SVD(), MinMaxNormalize()])

    self.assertFalse(processor.is_trained)
    processor.train([self.data.data(idx) for idx in [0, 1]])
    self.assertTrue(processor.is_trained)

    # Test processing of all data
    processed = processor(self.data.data())
    np.testing.assert_array_almost_equal(
        unp.nominal_values(processed),
        np.array([[0.0, 1.0], [1.0, 0.0], [0.5, 0.5], [0.75, 0.25]]),
    )
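# The SVD node is a trainable node: the processor reports is_trained == False
# until train() has been given calibration data to fit the projection axes,
# which is the pattern both test_normalize variants above rely on. A minimal
# sketch of that workflow (train_data and run_data are hypothetical stand-ins
# for lists of result dictionaries like self.data.data() returns):
processor = DataProcessor("memory", [SVD(), MinMaxNormalize()])
assert not processor.is_trained
processor.train(train_data)      # fit the SVD on calibration points, e.g. the |0> and |1> states
assert processor.is_trained
processed = processor(run_data)  # only a trained processor can project the IQ data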
def test_json(self):
    """Check if the node is serializable."""
    node = MinMaxNormalize()
    self.assertRoundTripSerializable(node, check_func=self.json_equiv)
def test_json_multi_node(self):
    """Check if the data processor with multiple nodes is serializable."""
    node1 = MinMaxNormalize()
    node2 = AverageData(axis=2)
    processor = DataProcessor("counts", [node1, node2])
    self.assertRoundTripSerializable(processor, check_func=self.json_equiv)
def test_json_single_node(self):
    """Check if the data processor is serializable."""
    node = MinMaxNormalize()
    processor = DataProcessor("counts", [node])
    self.assertRoundTripSerializable(processor, check_func=self.json_equiv)
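# A hedged sketch of what a round-trip check like assertRoundTripSerializable
# boils down to. It assumes the helper (and json_equiv) wrap the JSON hooks
# that qiskit-experiments provides for its classes, ExperimentEncoder and
# ExperimentDecoder; if the test base class uses a different mechanism, only
# the two cls arguments below would change.
import json
from qiskit_experiments.framework import ExperimentEncoder, ExperimentDecoder

payload = json.dumps(DataProcessor("counts", [MinMaxNormalize()]), cls=ExperimentEncoder)
restored = json.loads(payload, cls=ExperimentDecoder)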