def test_incremental_write(self):
    """Write the dataset after every single value arrives and verify the
    file read back equals an untouched reference copy."""
    data = DataSet1D(location=self.loc_provider, name='test_incremental')
    location = data.location
    reference = DataSet1D(False)

    # Blank the arrays and clear their modified ranges so the formatter
    # starts from an "unmodified" dataset.
    data.x_set[:] = float('nan')
    data.y[:] = float('nan')
    data.x_set.modified_range = None
    data.y.modified_range = None

    # Emulate acquisition: each incoming value triggers an immediate
    # write, even mid-row (the setpoint x lands before y).
    for index, (x_val, y_val) in enumerate(zip(reference.x_set,
                                               reference.y)):
        data.x_set[index] = x_val
        self.formatter.write(data)
        data.y[index] = y_val
        self.formatter.write(data)

    reloaded = DataSet(location=location, formatter=self.formatter)
    reloaded.read()

    self.checkArraysEqual(reloaded.arrays['x_set'],
                          reference.arrays['x_set'])
    self.checkArraysEqual(reloaded.arrays['y'], reference.arrays['y'])

    self.formatter.close_file(data)
    self.formatter.close_file(reloaded)
def test_reading_into_existing_data_array(self):
    """Reading a file must fill a pre-registered DataArray in place
    instead of replacing it with a freshly created object."""
    data = DataSet1D(location=self.loc_provider, name='test_read_existing')
    self.formatter.write(data)

    data2 = DataSet(location=data.location, formatter=self.formatter)
    preset = DataArray(
        name='dummy',
        array_id='x_set',  # deliberately collides with an id in the file
        label='bla',
        unit='a.u.',
        is_setpoint=False,
        set_arrays=(),
        preset_data=np.zeros(5))
    data2.add_array(preset)

    # The preset array is registered under the existing array id.
    self.assertTrue(preset is data2.arrays['x_set'])
    data2.read()
    # read() must not have swapped the object out.
    self.assertTrue(preset is data2.arrays['x_set'])

    # The contents were updated to match the originally written dataset.
    self.checkArraysEqual(data2.arrays['x_set'], data.arrays['x_set'])
    self.checkArraysEqual(data2.arrays['y'], data.arrays['y'])

    self.formatter.close_file(data)
    self.formatter.close_file(data2)
def test_from_server(self, gdm_mock):
    """Exercise PULL_FROM_SERVER mode: fallback to LOCAL on location
    mismatch, restrictions while attached to the server, and noop
    write/read/sync once location is False."""
    manager = MockDataManager()
    gdm_mock.return_value = manager
    manager.location = 'Mars'
    manager.live_data = MockLive()

    # A location unknown to the server (or a False one) degrades to LOCAL.
    ds = DataSet(location='Jupiter', data_manager=True,
                 mode=DataMode.PULL_FROM_SERVER)
    self.assertEqual(ds.mode, DataMode.LOCAL)
    ds = DataSet(location=False, data_manager=True,
                 mode=DataMode.PULL_FROM_SERVER)
    self.assertEqual(ds.mode, DataMode.LOCAL)

    # Matching the server's location keeps the dataset in server mode.
    ds = DataSet(location='Mars', data_manager=True,
                 mode=DataMode.PULL_FROM_SERVER, formatter=MockFormatter())
    self.assertEqual(ds.mode, DataMode.PULL_FROM_SERVER)
    self.assertEqual(ds.arrays, MockLive.arrays)

    # write() is only allowed in LOCAL mode.
    with self.assertRaises(RuntimeError):
        ds.write()
    # finalize() is likewise forbidden while pulling from the server.
    with self.assertRaises(RuntimeError):
        ds.finalize()

    # If the server later reports a different location, sync() reads the
    # data one last time and drops the dataset to LOCAL.
    manager.location = 'Saturn'
    ds.sync()
    self.assertEqual(ds.mode, DataMode.LOCAL)
    self.assertEqual(ds.has_read_data, True)

    # LOCAL mode permits writing again.
    ds.write()
    self.assertEqual(ds.has_written_data, True)

    # With location=False, write/read/sync all become noops.
    ds.has_read_data = False
    ds.has_written_data = False
    ds.location = False
    ds.write()
    ds.read()
    ds.sync()
    self.assertEqual(ds.has_read_data, False)
    self.assertEqual(ds.has_written_data, False)
def test_full_write_read_2D(self):
    """Round-trip a 2D dataset through the formatter and compare the
    setpoint and measured arrays."""
    original = DataSet2D(location=self.loc_provider, name='test2D')
    self.formatter.write(original)

    # Reload the same file via DataSet.read and compare every array.
    reloaded = DataSet(location=original.location, formatter=self.formatter)
    reloaded.read()
    self.checkArraysEqual(reloaded.x_set, original.x_set)
    self.checkArraysEqual(reloaded.y_set, original.y_set)
    self.checkArraysEqual(reloaded.z, original.z)

    self.formatter.close_file(original)
    self.formatter.close_file(reloaded)
def test_metadata_write_read(self):
    """Round-trip dataset metadata through the formatter.

    Based on the snapshot of the 1D dataset; a more complex snapshot
    would make a stronger test.
    """
    original = DataSet1D(location=self.loc_provider, name='test_metadata')
    original.snapshot()  # populate metadata — not added on init
    self.formatter.write(original)  # write() also persists the metadata

    reloaded = DataSet(location=original.location, formatter=self.formatter)
    reloaded.read()
    self.formatter.close_file(original)
    self.formatter.close_file(reloaded)

    equal, err_msg = compare_dictionaries(
        original.metadata, reloaded.metadata,
        'original_metadata', 'loaded_metadata')
    self.assertTrue(equal, msg='\n' + err_msg)
def test_full_write_read_1D(self):
    """Round-trip a 1D dataset through the formatter and compare arrays."""
    original = DataSet1D(name='test1D_full_write',
                         location=self.loc_provider)
    self.formatter.write(original)

    # The formatter has no convenient file-discovery helper, so reload
    # using the location recorded on the original dataset.
    reloaded = DataSet(location=original.location, formatter=self.formatter)
    reloaded.read()
    self.checkArraysEqual(reloaded.x_set, original.x_set)
    self.checkArraysEqual(reloaded.y, original.y)

    self.formatter.close_file(original)
    self.formatter.close_file(reloaded)
def test_loop_writing_2D(self):
    """Run a 2D Loop over a mock instrument, then check that all arrays
    and the metadata survive a round trip through the formatter."""
    station = Station()
    mock_parabola = MockParabola(name='Loop_writing_test_2D')
    station.add_component(mock_parabola)

    loop = Loop(mock_parabola.x[-100:100:20]).loop(
        mock_parabola.y[-50:50:10]).each(mock_parabola.skewed_parabola)
    written = loop.run(name='MockLoop_hdf5_test', formatter=self.formatter)

    reloaded = DataSet(location=written.location, formatter=self.formatter)
    reloaded.read()
    for array_id in reloaded.arrays:
        self.checkArraysEqual(reloaded.arrays[array_id],
                              written.arrays[array_id])

    equal, err_msg = compare_dictionaries(
        written.metadata, reloaded.metadata,
        'original_metadata', 'loaded_metadata')
    self.assertTrue(equal, msg='\n' + err_msg)
    self.formatter.close_file(written)
    self.formatter.close_file(reloaded)
] arrays4 = [data1, data2, data3] data_set_2 = new_data( arrays=arrays3, location=None, loc_record={ 'name': 'T1', 'label': 'Vread_sweep' }, io=NewIO, ) data_set_2.save_metadata() test_location = '2017-08-18/20-40-19_T1_Vread_sweep' data_set_3 = DataSet( location=test_location, io=NewIO, ) data_set_3.read() AWGpara_array = data_set_3.arrays['AWGpara_set'].ndarray index0_array = data_set_3.arrays['index0_set'].ndarray digitizer_array = data_set_3.arrays['digitizer_digitizer'].ndarray # #print('loop.data_set: %s' % LP.data_set) # #data = LP.run() #