def test_sync_from_storage(self):
    io_reader, io_writer = MemoryDataSetIOFactory.get_reader_writer_pair()
    data_set_consumer = DataSet(storage_reader=io_reader, name='consumer')
    some_array = DataArray('some_array', 'label', shape=(5, 1))
    io_writer.sync_add_data_array_to_storage(some_array)
    data_set_consumer.sync_from_storage(-1)
    self.assertTrue(hasattr(data_set_consumer, 'some_array'))
def test_add_array(self):
    array_name = 'some_array'
    data_array = DataArray(array_name, 'label', preset_data=np.array([1, 2, 3, 4, 5]))
    data_set = DataSet()
    data_set.add_array(data_array)
    self.assertListEqual(list(data_array), list(data_set.some_array))
    self.assertListEqual(list(data_array), list(data_set.data_arrays[array_name]))
    self.assertIs(data_set.some_array, data_set.data_arrays[array_name])
def test_constructor_multiple_data_arrays(self):
    storage = MagicMock(spec=MemoryDataSetIOWriter)
    name = 'ItsAName'
    array_name = 'ItsAnArray'
    user_data = {'some': 'data'}
    x_points = np.array(range(0, 10))
    y_points = np.array(range(0, 5))
    x = DataArray(name='x', label='x-axis', unit='mV', is_setpoint=True, preset_data=np.array(x_points))
    y = DataArray(name='y', label='y-axis', unit='mV', is_setpoint=True, set_arrays=(x,),
                  preset_data=np.tile(np.array(y_points), [x.size, 1]))
    z = DataArray(name=array_name, label='z-axis', unit='ma', set_arrays=[y, x],
                  preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    other_z = DataArray(name='other_array', label='z-axis', unit='ma', set_arrays=[y, x],
                        preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    data_set = DataSet(storage_writer=storage, name=name, user_data=user_data,
                       data_arrays=[z, other_z], set_arrays=[y, x])
    self.assertEqual([storage], data_set.storage)
    self.assertEqual(name, data_set.name)
    self.assertDictEqual(user_data, data_set.user_data)
    self.assertEqual(z, data_set.data_arrays[array_name])
    self.assertEqual(array_name, data_set.default_array_name)
    with self.assertRaises(TypeError) as error:
        DataSet(data_arrays=np.array([1, 2, 3, 4]))
    self.assertIn("'data_arrays' have to be of type 'DataArray', not <class 'numpy.ndarray'>",
                  error.exception.args)
def test_save_to_storage_raises_error(self):
    data_set = DataSet(storage_reader=MagicMock())
    writer = MagicMock()
    error_args = ValueError, "It is not allowed to have both storage_reader and storage_writer."
    self.assertRaisesRegex(*error_args, data_set.add_storage_writer, writer)
    writer.assert_not_called()
def test_set_arrays_access_via_attribute(self):
    x_points = np.array(range(0, 2))
    y_points = np.array(range(0, 2))
    x = DataArray(name="x", label="x-axis", unit="mV", is_setpoint=True, preset_data=x_points)
    y = DataArray(name="y", label="y-axis", unit="mV", is_setpoint=True,
                  preset_data=np.tile(np.array(y_points), [x.size, 1]))
    z = DataArray(name="z", label="z-axis", unit="ma", set_arrays=(x, y),
                  preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    data_set = DataSet(data_arrays=[z])
    attrs = dir(data_set)
    self.assertIn('x', attrs)
    self.assertIn('y', attrs)
    self.assertEqual(data_set.x, x)
    self.assertEqual(data_set.y, y)
def test_constructor(self):
    storage = MagicMock(spec=MemoryDataSetIOWriter)
    name = 'ItsAName'
    array_name = 'ItsAnArray'
    time_stamp = datetime.datetime(2018, 12, 24, 18, 0)
    user_data = {'some': 'data'}
    set_arrays = DataArray('setpoints', 'X', is_setpoint=True, preset_data=np.array([1, 2, 3, 4, 5]))
    data_arrays = DataArray(array_name, 'results', shape=(5,), set_arrays=[set_arrays])
    data_set = DataSet(storage_writer=storage, name=name, time_stamp=time_stamp, user_data=user_data,
                       data_arrays=data_arrays, set_arrays=set_arrays)
    self.assertEqual([storage], data_set.storage)
    self.assertEqual(name, data_set.name)
    self.assertEqual(time_stamp, data_set.time_stamp)
    self.assertDictEqual(user_data, data_set.user_data)
    self.assertEqual(data_arrays, data_set.data_arrays[array_name])
    self.assertEqual(array_name, data_set.default_array_name)
def test_run_process_multiple_signal_processors(self):
    data_set = DataSet(data_arrays=DataArray('x', 'x', preset_data=array([1, 2, 3, 4, 5])))

    class PlusOneSignalProcessor(SignalProcessorInterface):
        def __init__(self):
            self._signal_data = None

        def run_process(self, signal_data: DataSet) -> DataSet:
            self._signal_data = signal_data
            signal_data.data_arrays['x'] += 1
            return signal_data

    class TimesTwoSignalProcessor(SignalProcessorInterface):
        def __init__(self):
            self._signal_data = None

        def run_process(self, signal_data: DataSet) -> DataSet:
            self._signal_data = signal_data
            signal_data.data_arrays['x'] *= 2
            return signal_data

    signal_processor_runner = SignalProcessorRunner()
    signal_processor_runner.add_signal_processor(PlusOneSignalProcessor())
    signal_processor_runner.add_signal_processor(TimesTwoSignalProcessor())

    new_data_set = signal_processor_runner.run(data_set)
    self.assertIs(data_set.data_arrays['x'], new_data_set.data_arrays['x'])
    self.assertTrue(array_equal(new_data_set.data_arrays['x'], array([4, 6, 8, 10, 12])))
def load(name: Optional[str] = None, document_id: Optional[str] = None,
         database: str = MongoDataSetIO.DEFAULT_DATABASE_NAME,
         collection: str = MongoDataSetIO.DEFAULT_COLLECTION_NAME) -> DataSet:
    """ Load an existing data set from the mongodb.

    Args:
        name: Name of the data set.
        document_id: _id of the data set.
        database: Name of the database.
        collection: Name of the collection.

    Returns:
        A new instance of the underlying data set.

    Raises:
        DocumentNotFoundError: If document_id or name do not match any data set in the database.
    """
    reader = MongoDataSetIOReader(name, document_id, database=database, collection=collection)
    return DataSet(storage_reader=reader)
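# Usage sketch, assuming a data set was previously stored under a known name; the name
# 'experiment_1' is a hypothetical example and the default database and collection are used.
# The returned DataSet is backed by a MongoDataSetIOReader and can be kept up to date by
# calling sync_from_storage, as in the tests above.
loaded_data_set = load(name='experiment_1')
loaded_data_set.sync_from_storage(-1)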
def __dummy_data_set(period, sample_rate, width, resolution):
    time_data = TestProcessSawtooth2D.__dummy_time_data(period, sample_rate)
    set_array = DataArray('ScopeTime', 'Time', unit='seconds', is_setpoint=True, preset_data=time_data)
    scope_data_1 = TestProcessSawtooth2D.__dummy_scope_data(time_data, resolution[0], period, width[0])
    data_array_1 = TestProcessSawtooth2D.__dummy_data_array(set_array, scope_data_1, channel_index=1, trace_number=1)
    scope_data_2 = TestProcessSawtooth2D.__dummy_scope_data(time_data, resolution[1], period, width[1])
    data_array_2 = TestProcessSawtooth2D.__dummy_data_array(set_array, scope_data_2, channel_index=2, trace_number=2)
    data_set = DataSet()
    data_set.user_data = {'resolution': resolution, 'width': width}
    data_set.add_array(data_array_1)
    data_set.add_array(data_array_2)
    return data_set
def test_run_process_without_signal_processor(self):
    data_set = DataSet(data_arrays=DataArray('x', 'x', preset_data=array([1, 2, 3, 4, 5])))
    signal_processor_runner = SignalProcessorRunner()

    new_data_set = signal_processor_runner.run(data_set)
    self.assertIs(data_set.data_arrays['x'], new_data_set.data_arrays['x'])
    self.assertTrue(array_equal(new_data_set.data_arrays['x'], array([1, 2, 3, 4, 5])))
def test__repr__(self):
    data_set = DataSet(name='some_name', time_stamp=datetime.datetime(2000, 1, 1),
                       user_data={'user': '******'}, set_arrays=[])
    expect = "DataSet(id={}, name='{}', storage_writer=[], storage_reader=None, time_stamp={}, user_data={}, " \
             "data_arrays={}, set_arrays={})".format(id(data_set), 'some_name',
                                                     'datetime.datetime(2000, 1, 1, 0, 0)',
                                                     {'user': '******'}, {}, {})
    self.assertEqual(expect, repr(data_set))
def test_integrate_with_data_set_io_add_array(self):
    io_reader, io_writer = MemoryDataSetIOFactory.get_reader_writer_pair()
    data_set_consumer = DataSet(storage_reader=io_reader)
    some_array = DataArray('some_array', 'label', shape=(5, 5))
    data_set_producer = DataSet(storage_writer=io_writer)
    data_set_producer.add_array(some_array)
    data_set_consumer.sync_from_storage(-1)
    self.assertTrue(hasattr(data_set_consumer, 'some_array'))
    self.assertEqual('some_array', data_set_consumer.some_array.name)
    self.assertEqual((5, 5), data_set_consumer.some_array.shape)
    self.assertIsNot(some_array, data_set_consumer.some_array)
def run_process(self, signal_data: DataSet) -> DataSet:
    """ Extracts a 2D image from a readout dot response measured with an acquisition device.

    Args:
        signal_data: The readout dot response data coming from the acquisition device.
                     The user data of the data set should contain the width and resolution settings.

    Returns:
        A data set which contains a 2D image with the charge stability diagram.
    """
    width_x, width_y = signal_data.user_data['width']
    resolution_x, resolution_y = signal_data.user_data['resolution']

    output_signal_data = DataSet(user_data=signal_data.user_data)
    for data_array in signal_data.data_arrays.values():
        ProcessSawtooth2D.__check_sample_count_slow_sawtooth(data_array, width_y)
        ProcessSawtooth2D.__check_sample_count_fast_sawtooth(data_array, width_x, resolution_x, resolution_y)
        ProcessSawtooth2D.__check_matching_cuttoff(width_x, width_y, resolution_x, resolution_y)

        sample_count = len(data_array)
        samples_sawtooth_x = int(sample_count / resolution_y)
        samples_edge_x = int(sample_count / resolution_y * width_x)
        samples_edge_y = int(sample_count * width_y)

        offsets = np.arange(0, samples_edge_y, samples_sawtooth_x, dtype=np.int)
        identifier = f'{data_array.name}_SawtoothProcessed2D'

        sliced_data = np.array([data_array[o:o + samples_edge_x] for o in offsets])
        result_data = DataArray(identifier, data_array.label, preset_data=sliced_data)
        output_signal_data.add_array(result_data)

    return output_signal_data
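# Usage sketch, assuming ProcessSawtooth2D takes no constructor arguments and is registered
# with a SignalProcessorRunner in the same way as the processors in the tests above. The
# variable scope_data_set is a hypothetical acquisition data set whose user data carries
# the 'width' and 'resolution' settings, e.g. as built by __dummy_data_set.
runner = SignalProcessorRunner()
runner.add_signal_processor(ProcessSawtooth2D())
processed_2d = runner.run(scope_data_set)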
def test_save_to_storage(self):
    data_set = DataSet()
    data_set.name = 'TheName'
    data_set.time_stamp = datetime.datetime(2018, 12, 24, 18)
    data_set.user_data = {'Data': 'stuff'}
    data_set.default_array_name = 'TheDefault'
    data_set.add_array(self.data_array)
    writer = MagicMock()
    data_set.add_storage_writer(writer)
    expected_calls = [
        call.sync_metadata_to_storage('name', 'TheName'),
        call.sync_metadata_to_storage('time_stamp', datetime.datetime(2018, 12, 24, 18)),
        call.sync_metadata_to_storage('user_data', {'Data': 'stuff'}),
        call.sync_metadata_to_storage('default_array_name', 'TheDefault')
    ]
    writer.assert_has_calls(expected_calls)
    writer.sync_add_data_array_to_storage.assert_called()
def test_integrate_with_data_set_io_add_data(self):
    io_reader, io_writer = MemoryDataSetIOFactory.get_reader_writer_pair()
    data_set_consumer = DataSet(storage_reader=io_reader)
    some_array = DataArray('some_array', 'label', shape=(5, 5))
    data_set_producer = DataSet(storage_writer=io_writer, data_arrays=some_array)
    data_set_producer.add_data((0, 0), {'some_array': 42})
    data_set_producer.add_data((1, 1), {'some_array': 25})
    data_set_consumer.sync_from_storage(-1)
    self.assertTrue(hasattr(data_set_consumer, 'some_array'))
    self.assertIsNot(some_array, data_set_consumer.some_array)
    self.assertEqual(42, data_set_consumer.some_array[0][0])
    self.assertEqual(25, data_set_consumer.some_array[1][1])
def test_sync_from_storage_meta_data(self):
    mock_queue = MagicMock()
    with patch('qilib.data_set.mongo_data_set_io_reader.MongoDataSetIO') as mock_io, \
            patch('qilib.data_set.mongo_data_set_io_reader.Thread') as thread, \
            patch('qilib.data_set.mongo_data_set_io_reader.Queue', return_value=mock_queue):
        reader = MongoDataSetIOReader(name='test')
        thread.assert_called_once()
        mock_io.assert_called_once_with('test', None, create_if_not_found=False, collection='data_sets',
                                        database='qilib')
        data_set = DataSet(storage_reader=reader)
        mock_queue.get.return_value = {
            'updateDescription': {'updatedFields': {'metadata': {'name': 'test_name'}}}}
        data_set.sync_from_storage(-1)
        self.assertEqual('test_name', data_set.name)
        mock_queue.get.return_value = {
            'updateDescription': {'updatedFields': {'metadata.default_array_name': 'some_array'}}}
        data_set.sync_from_storage(-1)
        self.assertEqual('some_array', data_set.default_array_name)
def run_process(self, signal_data: DataSet) -> DataSet:
    """ Extracts a 1D image from a readout dot response measured with an acquisition device.

    Args:
        signal_data: The readout dot response data coming from the acquisition device.
                     The user data of the data set should contain the width and resolution settings.

    Returns:
        A data set which contains a 1D image with the charge stability diagram.
    """
    data_set = DataSet(user_data=signal_data.user_data)
    width = data_set.user_data['width']
    for data_array in signal_data.data_arrays.values():
        sample_count = len(data_array)
        sliced_data = data_array[:int(sample_count * width)]
        identifier = f'{data_array.name}_SawtoothProcessed1D'
        data_set.add_array(DataArray(identifier, data_array.label, preset_data=sliced_data))
    return data_set
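# Usage sketch for the 1D variant, wired up in the same way as the 2D processor above.
# The class name ProcessSawtooth1D and its no-argument constructor are assumptions, as is
# the hypothetical scope_data_set carrying a scalar 'width' in its user data.
runner = SignalProcessorRunner()
runner.add_signal_processor(ProcessSawtooth1D())
processed_1d = runner.run(scope_data_set)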
def test_metadata_triggers_update(self):
    name = 'Bobo'
    user_data = PythonJsonStructure(data='plata', snapshot=False)
    timestamp = datetime.datetime(2019, 12, 24)
    default_array_name = 'ThatsAGoodName'
    io_reader, io_writer = MemoryDataSetIOFactory.get_reader_writer_pair()
    data_set_consumer = DataSet(storage_reader=io_reader)
    data_set_producer = DataSet(storage_writer=io_writer)
    data_set_producer.name = name
    data_set_producer.user_data = user_data
    data_set_producer.time_stamp = timestamp
    data_set_producer.default_array_name = default_array_name
    data_set_consumer.sync_from_storage(-1)
    self.assertEqual(name, data_set_consumer.name)
    self.assertDictEqual(user_data, data_set_consumer.user_data)
    self.assertEqual(timestamp, data_set_consumer.time_stamp)
    self.assertEqual(default_array_name, data_set_consumer.default_array_name)
def test_add_data_set_arrays(self):
    x_points = np.array(range(0, 2))
    y_points = np.array(range(0, 2))
    x = DataArray(name="x", label="x-axis", unit="mV", is_setpoint=True, preset_data=x_points)
    y = DataArray(name="y", label="y-axis", unit="mV", is_setpoint=True,
                  preset_data=np.tile(np.array(y_points), [x.size, 1]))
    z = DataArray(name="z", label="z-axis", unit="ma", set_arrays=(x, y),
                  preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    data_set = DataSet(data_arrays=z)
    data_set.add_data((1, 0), {'y': 23})
    self.assertEqual(data_set.y[(1, 0)], 23)
def test_add_data(self):
    some_array = DataArray('some_array', 'label', shape=(5, 5))
    data_set = DataSet(data_arrays=some_array)
    data_set.add_data((4, 4), {'some_array': 42})
    self.assertEqual(42, some_array[4][4])
    data_set.add_data(3, {'some_array': [1, 2, 3, 4, 5]})
    self.assertListEqual([1, 2, 3, 4, 5], list(some_array[3]))
def test_constructor_multiple_set_arrays(self):
    storage = MagicMock(spec=MemoryDataSetIOWriter)
    name = 'ItsAName'
    array_name = self.data_array.name
    user_data = {'some': 'data'}
    data_set = DataSet(storage_writer=storage, name=name, user_data=user_data,
                       data_arrays=self.data_array, set_arrays=[self.set_y, self.set_x])
    self.assertEqual([storage], data_set.storage)
    self.assertEqual(name, data_set.name)
    self.assertDictEqual(user_data, data_set.user_data)
    self.assertEqual(self.data_array, data_set.data_arrays[array_name])
    self.assertEqual(array_name, data_set.default_array_name)
def test_sync_from_storage_array_update(self):
    mock_queue = MagicMock()
    with patch('qilib.data_set.mongo_data_set_io_reader.MongoDataSetIO') as mock_io, \
            patch('qilib.data_set.mongo_data_set_io_reader.Thread') as thread, \
            patch('qilib.data_set.mongo_data_set_io_reader.Queue', return_value=mock_queue):
        reader = MongoDataSetIOReader(name='test')
        thread.assert_called_once()
        mock_io.assert_called_once_with('test', None, create_if_not_found=False, collection='data_sets',
                                        database='qilib')
        data_set = DataSet(storage_reader=reader)
        data_array = DataArray(name='test_array', label='lab', shape=(2, 2))
        data_set.add_array(data_array)
        mock_queue.get.return_value = {
            'updateDescription': {
                'updatedFields': {
                    'array_updates': [[[0, 0], {'test_array': 42}], [[0, 1], {'test_array': 25}]]}}}
        data_set.sync_from_storage(-1)
        self.assertListEqual([42, 25], list(data_set.test_array[0]))
        mock_queue.get.return_value = {
            'updateDescription': {
                'updatedFields': {
                    'array_updates.1': [1, {'test_array': [67, 67]}]}}}
        data_set.sync_from_storage(-1)
        self.assertListEqual([67, 67], list(data_set.test_array[1]))
def test_add_array_duplicate_raises_error(self):
    name = 'some_array'
    data_array = DataArray(name, 'label', preset_data=np.array([1, 2, 3, 4, 5]))
    data_set = DataSet()
    data_set.add_array(data_array)
    with self.assertRaises(ValueError) as error:
        data_set.add_array(data_array)
    self.assertEqual(("DataSet already contains an array with the name '{}'".format(name),),
                     error.exception.args)
def test_add_data_higher_dimensions(self):
    some_array = DataArray('some_array', 'label', shape=(5, 5, 5, 5))
    data_set = DataSet(data_arrays=some_array)
    data_set.add_data((3, 3, 3, 3), {'some_array': 0.42})
    self.assertEqual(0.42, some_array[3][3][3][3])
    double_array = [[1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]]
    data_set.add_data((2, 2), {'some_array': double_array})
    self.assertTrue(np.array_equal(double_array, some_array[2][2]))
    self.assertFalse(np.array_equal(double_array, some_array[2][1]))
def test_run():
    with patch('qtt.measurements.post_processing.interfaces.signal_processor_interface.SignalProcessorInterface',
               spec=SignalProcessorInterface) as spi:
        class DummySignalProcessor(spi):
            def __init__(self):
                self._signal_data = None

            def run_process(self, signal_data: DataSet) -> DataSet:
                self._signal_data = signal_data
                return signal_data

        signal_processor_runner = SignalProcessorRunner()
        signal_processor_runner.add_signal_processor(DummySignalProcessor())

        data_set = DataSet()
        signal_processor_runner.run(data_set)

        spi.run_process.assert_called_once()
        spi.run_process.assert_called_with(data_set)
def test_add_array_with_bad_name(self):
    data_array = DataArray('this is not a good name', 'label', preset_data=np.array([1, 2, 3, 4, 5]))
    data_set = DataSet()
    with self.assertRaisesRegex(SyntaxError,
                                "'this is not a good name' is an invalid name for an identifier."):
        data_set.add_array(data_array)
    data_array.name = 99
    with self.assertRaisesRegex(ValueError, "Array name has to be string, not <class 'int'>"):
        data_set.add_array(data_array)
def test_string(self):
    name = 'ItsAName'
    array_name = 'ItsAnArray'
    user_data = {'some': 'data'}
    x_points = np.array(range(0, 10))
    y_points = np.array(range(0, 5))
    x = DataArray(name='x', label='x-axis', unit='mV', is_setpoint=True, preset_data=np.array(x_points))
    y = DataArray(name='y', label='y-axis', unit='mV', is_setpoint=True, set_arrays=(x,),
                  preset_data=np.tile(np.array(y_points), [x.size, 1]))
    z = DataArray(name=array_name, label='z-axis', unit='ma', set_arrays=[y, x],
                  preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    other_z = DataArray(name='other_array', label='z-axis', unit='ma', set_arrays=[y, x],
                        preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    data_set = DataSet(name=name, user_data=user_data, data_arrays=[z, other_z], set_arrays=[y, x])
    expected = "DataSet: ItsAName\n name | label | unit | shape | setpoint\n ItsAnArray | z-axis | " \
               "ma | (10, 5) | False\n other_array | z-axis | ma | (10, 5) | False\n y | y-axis |" \
               " mV | (10, 5) | True\n x | x-axis | mV | (10,) | True"
    actual = str(data_set)
    self.assertEqual(expected, actual)
def test_bind_data_set(self):
    mock_mongo_data_set_io = MagicMock()
    with patch('qilib.data_set.mongo_data_set_io_reader.MongoDataSetIO',
               return_value=mock_mongo_data_set_io) as mock_io, \
            patch('qilib.data_set.mongo_data_set_io_reader.Thread') as thread:
        mock_mongo_data_set_io.get_document.return_value = {
            'name': 'test',
            'metadata': {'default_array_name': 'array'}}
        reader = MongoDataSetIOReader(name='test')
        thread.assert_called_once()
        mock_io.assert_called_once_with('test', None, create_if_not_found=False, collection='data_sets',
                                        database='qilib')
        data_set = DataSet()
        reader.bind_data_set(data_set)
        self.assertEqual('test', data_set.name)
        self.assertEqual('array', data_set.default_array_name)
def test_setters(self):
    data_set = DataSet()
    self.assertEqual('', data_set.name)
    self.assertIsInstance(data_set.time_stamp, datetime.datetime)
    self.assertIsNone(data_set.user_data)
    self.assertEqual("", data_set.default_array_name)
    data_set.name = 'TheName'
    data_set.time_stamp = datetime.datetime(2018, 12, 24, 18)
    data_set.user_data = {'Data': 'stuff'}
    data_set.default_array_name = 'TheDefault'
    self.assertEqual('TheName', data_set.name)
    self.assertEqual(datetime.datetime(2018, 12, 24, 18), data_set.time_stamp)
    self.assertDictEqual({'Data': 'stuff'}, data_set.user_data)
    self.assertEqual("TheDefault", data_set.default_array_name)
def test_set_arrays_property(self):
    x_points = np.array(range(0, 2))
    y_points = np.array(range(0, 2))
    x = DataArray(name="x", label="x-axis", unit="mV", is_setpoint=True, preset_data=x_points)
    y = DataArray(name="y", label="y-axis", unit="mV", is_setpoint=True,
                  preset_data=np.tile(np.array(y_points), [x.size, 1]))
    z = DataArray(name="z", label="z-axis", unit="ma", set_arrays=(x, y),
                  preset_data=np.NaN * np.ones((x_points.size, y_points.size)))
    data_set = DataSet(data_arrays=[z])
    self.assertEqual(len(data_set.set_arrays), 2)
    self.assertEqual(data_set.set_arrays['x'], x)
    self.assertEqual(data_set.set_arrays['y'], y)