def test_read_raw_timestamp_data():
    """ Test reading timestamp channel data as raw TDMS timestamps """
    seconds = 3672033330
    second_fractions = 1234567890 * 10 ** 10

    # Each raw TDMS timestamp is written little-endian as
    # (uint64 second fractions, int64 seconds since the TDMS epoch)
    timestamp_pairs = [
        (0, seconds),
        (second_fractions, seconds),
        (0, seconds + 1),
        (second_fractions, seconds + 1),
    ]
    raw_data = "".join(
        hexlify_value("<Q", fractions) + hexlify_value("<q", secs)
        for (fractions, secs) in timestamp_pairs)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 0x44, 4),
        ),
        raw_data)

    expected_seconds = np.array(
        [seconds, seconds, seconds + 1, seconds + 1], np.dtype('int64'))
    expected_second_fractions = np.array(
        [0, second_fractions, 0, second_fractions], np.dtype('uint64'))

    with test_file.get_tempfile() as temp_file:
        # raw_timestamps=True keeps the full timestamp resolution rather
        # than converting to numpy datetime64
        tdms_data = TdmsFile.read(temp_file.file, raw_timestamps=True)
        data = tdms_data['group']['channel1'][:]
        assert isinstance(data, TimestampArray)
        np.testing.assert_equal(data.seconds, expected_seconds)
        np.testing.assert_equal(data.second_fractions, expected_second_fractions)
def test_reading_subset_of_data(offset, length):
    """ Test reading a subset of channel data spread across multiple segments """
    channel_data = np.arange(0, 100, 1, dtype=np.int32)

    # Split data into different sized segments
    segment_boundaries = [0, 10, 20, 60, 80, 90, 100]
    segment_data = [
        channel_data[begin:end]
        for (begin, end) in zip(segment_boundaries[:-1], segment_boundaries[1:])
    ]
    hex_segment_data = [
        "".join(hexlify_value('<i', value) for value in values)
        for values in segment_data
    ]

    test_file = GeneratedFile()
    # First segment carries the metadata; later segments reuse it
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 5),
        ),
        hex_segment_data[0])
    for hex_data in hex_segment_data[1:]:
        test_file.add_segment(("kTocRawData", ), "", hex_data)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            channel_subset = tdms_file['group']['channel1'].read_data(offset, length)
            expected_data = channel_data[offset:offset + length]
            assert len(channel_subset) == len(expected_data)
            np.testing.assert_equal(channel_subset, expected_data)
def test_read_with_mismatching_index_file():
    """ Test that reading data when the index file doesn't match the data file raises an error """
    # Data file: two segments, each with 2 values per channel
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2),
            channel_metadata("/'group'/'channel2'", 3, 2),
        ),
        "01 00 00 00" "02 00 00 00"
        "03 00 00 00" "04 00 00 00"
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2),
            channel_metadata("/'group'/'channel2'", 3, 2),
        ),
        "01 00 00 00" "02 00 00 00"
        "03 00 00 00" "04 00 00 00"
    )
    # Second file: same layout but 3 values per channel per segment, so its
    # index describes different segment sizes than the data file above
    test_file_with_index = GeneratedFile()
    test_file_with_index.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 3),
            channel_metadata("/'group'/'channel2'", 3, 3),
        ),
        "01 00 00 00" "02 00 00 00"
        "03 00 00 00" "04 00 00 00"
        "05 00 00 00" "06 00 00 00"
    )
    test_file_with_index.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 3),
            channel_metadata("/'group'/'channel2'", 3, 3),
        ),
        "01 00 00 00" "02 00 00 00"
        "03 00 00 00" "04 00 00 00"
        "05 00 00 00" "06 00 00 00"
    )
    # delete=False because the data file is reopened by name below and
    # removed manually in the finally block
    with test_file.get_tempfile(delete=False) as tdms_file:
        with test_file_with_index.get_tempfile_with_index() as tdms_file_with_index_path:
            # Move index file from second file to match the name of the first file
            new_index_file = tdms_file.name + '_index'
            copyfile(tdms_file_with_index_path + '_index', new_index_file)
            try:
                # Close the handle so the file can be reopened by name
                tdms_file.file.close()
                with pytest.raises(ValueError) as exc_info:
                    _ = TdmsFile.read(tdms_file.name)
                assert 'Check that the tdms_index file matches the tdms data file' in str(exc_info.value)
            finally:
                # Clean up both files created with delete disabled
                os.remove(new_index_file)
                os.remove(tdms_file.name)
def test_tdmsinfo_with_debug_output(caplog):
    """ Test that the tdmsinfo --debug flag produces debug log output """
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    temp_file = test_file.get_tempfile(delete=False)
    try:
        # Close our handle so tdmsinfo can reopen the file by name
        temp_file.file.close()
        cli_args = ['tdmsinfo.py', temp_file.name, '--debug']
        with patch.object(sys, 'argv', cli_args):
            tdmsinfo.main()
        assert "Reading metadata for object /'Group'/'Channel1'" in caplog.text
    finally:
        os.remove(temp_file.name)
def test_tdmsinfo(capsys):
    """ Test the standard output of the tdmsinfo command line tool """
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    temp_file = test_file.get_tempfile(delete=False)
    try:
        # Close our handle so tdmsinfo can reopen the file by name
        temp_file.file.close()
        cli_args = ['tdmsinfo.py', temp_file.name]
        with patch.object(sys, 'argv', cli_args):
            tdmsinfo.main()
        output = capsys.readouterr().out
        # Channel paths are shown, but properties are not without --properties
        assert "/'Group'/'Channel1'" in output
        assert "wf_start_offset" not in output
    finally:
        os.remove(temp_file.name)
def test_stream_data_chunks():
    """Test streaming chunks of DAQmx data from a TDMS file """
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
    }
    scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_1], properties),
        daqmx_channel_metadata("Channel2", 4, [4], [scaler_2], properties))
    # Data for segment: interleaved int16 values for the two channels
    data = (
        "01 00" "11 00"
        "02 00" "12 00"
        "03 00" "13 00"
        "04 00" "14 00"
        "05 00" "15 00"
        "06 00" "16 00"
        "07 00" "17 00"
        "08 00" "18 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    data_arrays = defaultdict(list)
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for chunk in tdms_file.data_chunks():
                for group in chunk.groups():
                    for channel in group.channels():
                        key = (group.name, channel.name)
                        # The chunk offset should equal the number of
                        # values accumulated so far for this channel
                        assert channel.offset == len(data_arrays[key])
                        data_arrays[key].extend(channel[:])

    expected_channel_data = {
        ("Group", "Channel1"): [1, 2, 3, 4, 5, 6, 7, 8],
        ("Group", "Channel2"): [17, 18, 19, 20, 21, 22, 23, 24],
    }
    for (key, expected_data) in expected_channel_data.items():
        np.testing.assert_equal(data_arrays[key], expected_data)
def test_lazily_reading_a_subset_of_raw_channel_data():
    """ Test loading a subset of raw scaler channel data from a DAQmx file """
    # Single scale which is just the raw DAQmx scaler data
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
    }
    raw_scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    raw_scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [raw_scaler_1], properties),
        daqmx_channel_metadata("Channel2", 4, [4], [raw_scaler_2], properties))
    # Data for segment: interleaved int16 values for the two channels
    data = (
        "01 00" "11 00"
        "02 00" "12 00"
        "03 00" "13 00"
        "04 00" "14 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Read 2 values starting at offset 1 from each channel; the
            # result maps scaler id to a raw int16 array
            for (channel_name, expected_values) in [
                    ("Channel1", [2, 3]),
                    ("Channel2", [18, 19])]:
                raw_data = tdms_file["Group"][channel_name].read_data(1, 2, scaled=False)
                assert len(raw_data) == 1
                assert raw_data[0].dtype == np.int16
                np.testing.assert_array_equal(raw_data[0], expected_values)
def test_read_raw_timestamp_properties():
    """ Test reading a timestamp property value as a raw TDMS timestamp """
    timestamp_seconds = 3524551547
    second_fractions = 1234567890 * 10 ** 10
    # Raw TDMS timestamps are written little-endian as
    # (uint64 second fractions, int64 seconds)
    raw_timestamp = (
        hexlify_value("<Q", second_fractions) +
        hexlify_value("<q", timestamp_seconds))
    properties = {
        "wf_start_time": (0x44, raw_timestamp)
    }

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2, properties),
        ),
        "01 00 00 00" "02 00 00 00")

    with test_file.get_tempfile() as temp_file:
        tdms_data = TdmsFile.read(temp_file.file, raw_timestamps=True)
        start_time = tdms_data['group']['channel1'].properties['wf_start_time']
        assert start_time.seconds == timestamp_seconds
        assert start_time.second_fractions == second_fractions
def test_lazily_reading_channel():
    """ Test loading channels individually from a DAQmx file """
    # Single scale which is just the raw DAQmx scaler data
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
    }
    raw_scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    raw_scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [raw_scaler_1], properties),
        daqmx_channel_metadata("Channel2", 4, [4], [raw_scaler_2], properties))
    # Data for segment: interleaved int16 values for the two channels
    data = (
        "01 00" "11 00"
        "02 00" "12 00"
        "03 00" "13 00"
        "04 00" "14 00"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    expected_channel_data = {
        "Channel1": [1, 2, 3, 4],
        "Channel2": [17, 18, 19, 20],
    }
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (channel_name, expected_values) in expected_channel_data.items():
                channel_data = tdms_file["Group"][channel_name].read_data()
                assert channel_data.dtype == np.int16
                np.testing.assert_array_equal(channel_data, expected_values)
def test_read_raw_data(offset, length):
    """ Test reading raw unscaled DAQmx data with an offset and length """
    # Single scale which is just the raw DAQmx scaler data
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
    }
    scaler_1 = daqmx_scaler_metadata(1, 3, 0)
    scaler_2 = daqmx_scaler_metadata(2, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_1], properties),
        daqmx_channel_metadata("Channel2", 4, [4], [scaler_2], properties))
    # Data for segment: interleaved int16 values for the two channels
    data = (
        "01 00" "11 00"
        "02 00" "12 00"
        "03 00" "13 00"
        "04 00" "14 00"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    # Expected slice end; length of None means read to the end of the data
    end = None if length is None else offset + length

    with test_file.get_tempfile() as temp_file:
        tdms_file = TdmsFile.read(temp_file.file)
        # Raw reads return a dict keyed by scaler id
        for (channel_name, scaler_id, full_values) in [
                ("Channel1", 1, [1, 2, 3, 4]),
                ("Channel2", 2, [17, 18, 19, 20])]:
            raw_data = tdms_file["Group"][channel_name].read_data(
                offset=offset, length=length, scaled=False)
            assert raw_data[scaler_id].dtype == np.int16
            np.testing.assert_array_equal(raw_data[scaler_id], full_values[offset:end])