def test_indexing_channel_with_slice(opened_tdms_file, start, stop, step):
    """ Test indexing into a channel with a slice """
    tdms_file, expected_data = opened_tdms_file
    for (group, channel), expected_channel_data in expected_data.items():
        channel_obj = tdms_file[group][channel]
        # Slicing the channel object must match slicing the expected array
        sliced = channel_obj[start:stop:step]
        compare_arrays(sliced, expected_channel_data[start:stop:step])
def test_lazily_read_channel_data(test_file, expected_data):
    """Test reading channel data lazily with TdmsFile.open

    Checks both the dtype and the values of each channel read on demand.
    """
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Renamed loop variable so it no longer shadows the
            # expected_data parameter dict being iterated
            for (group, channel), expected_channel_data in expected_data.items():
                actual_data = tdms_file[group][channel].read_data()
                assert actual_data.dtype == expected_channel_data.dtype
                compare_arrays(actual_data, expected_channel_data)
def test_reading_subset_of_data_for_scenario(test_file, expected_data, offset, length):
    """Test reading a subset of a channel's data

    Uses hypothesis assume to skip cases where the offset is beyond
    every channel's length.
    """
    assume(any(offset <= len(d) for d in expected_data.values()))
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Renamed loop variable so it no longer shadows the
            # expected_data parameter dict being iterated
            for (group, channel), expected_channel_data in expected_data.items():
                actual_data = tdms_file[group][channel].read_data(offset, length)
                compare_arrays(actual_data, expected_channel_data[offset:offset + length])
def test_indexing_channel_with_ellipsis():
    """ Test indexing into a channel with ellipsis returns all data """
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), expected_channel_data in expected_data.items():
                channel_obj = tdms_file[group][channel]
                # channel[...] should be equivalent to reading everything
                compare_arrays(channel_obj[...], expected_channel_data)
def test_read_with_index_file(test_file, expected_data):
    """ Test reading a file with an associated tdms_index file """
    with test_file.get_tempfile_with_index() as tdms_file_path:
        tdms_file = TdmsFile.read(tdms_file_path)
    for (group, channel), expected_channel_data in expected_data.items():
        compare_arrays(tdms_file[group][channel].data, expected_channel_data)
def test_indexing_channel_after_read_data():
    """ Test indexing into a channel after reading all data """
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        tdms_file = TdmsFile.read(temp_file.file)
    for (group, channel), expected_channel_data in expected_data.items():
        channel_obj = tdms_file[group][channel]
        # Both single-element and full-slice indexing should work on
        # a channel whose data was fully read up front
        assert channel_obj[0] == expected_channel_data[0]
        compare_arrays(channel_obj[:], expected_channel_data)
def test_lazily_read_raw_channel_data_slice():
    """Test lazily reading a slice of raw (unscaled) channel data

    Reads with offset=1 and length=2 and compares against the
    corresponding slice of the expected data.
    """
    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Renamed loop variable so it no longer shadows the
            # expected_data dict being iterated
            for (group, channel), expected_channel_data in expected_data.items():
                actual_data = tdms_file[group][channel].read_data(
                    offset=1, length=2, scaled=False)
                assert actual_data.dtype == expected_channel_data.dtype
                compare_arrays(actual_data, expected_channel_data[1:3])
def test_read_raw_channel_data():
    """Test reading raw (unscaled) channel data after a full file read"""
    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    with test_file.get_tempfile() as temp_file:
        tdms_file = TdmsFile.read(temp_file.file)
    # Renamed loop variable so it no longer shadows the
    # expected_data dict being iterated
    for (group, channel), expected_channel_data in expected_data.items():
        actual_data = tdms_file[group][channel].read_data(scaled=False)
        assert actual_data.dtype == expected_channel_data.dtype
        compare_arrays(actual_data, expected_channel_data)
def test_stream_channel_data_chunks(test_file, expected_data):
    """Test streaming chunks of data for a single channel from a TDMS file """
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), expected_channel_data in expected_data.items():
                collected = []
                for chunk in tdms_file[group][channel].data_chunks():
                    # Each chunk's offset should equal the number of
                    # values accumulated so far
                    assert chunk.offset == len(collected)
                    collected.extend(chunk[:])
                compare_arrays(collected, expected_channel_data)
def test_iterate_channel_data_in_read_mode():
    """Test iterating over channel data after reading all data """
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        tdms_file = TdmsFile.read(temp_file.file)
    for (group, channel), expected_channel_data in expected_data.items():
        # Iterating the channel object should yield each value in order
        actual_data = [value for value in tdms_file[group][channel]]
        compare_arrays(actual_data, expected_channel_data)
def test_indexing_scaled_channel_with_integer():
    """ Test indexing into a channel with an integer index
        when the channel is scaled
    """
    test_file, expected_data = scenarios.scaled_data().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), expected_channel_data in expected_data.items():
                channel_obj = tdms_file[group][channel]
                # Deliberately index by integer position (rather than
                # slicing) to exercise the integer __getitem__ path
                collected = [channel_obj[index] for index in range(len(channel_obj))]
                compare_arrays(collected, expected_channel_data)
def test_indexing_channel_with_integer_and_caching():
    """ Test indexing into a channel with an integer index,
        reusing the same file to test caching
    """
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), expected_channel_data in expected_data.items():
                channel_obj = tdms_file[group][channel]
                # Deliberately index by integer position to exercise
                # repeated reads against the same underlying file
                collected = [channel_obj[index] for index in range(len(channel_obj))]
                compare_arrays(collected, expected_channel_data)
def test_read_channel_data(test_file, expected_data):
    """Test reading data with TdmsFile.read

    Checks the data values as well as both the array dtype and the
    channel object's reported dtype.
    """
    with test_file.get_tempfile() as temp_file:
        tdms_data = TdmsFile.read(temp_file.file)
    # Renamed loop variable so it no longer shadows the
    # expected_data parameter dict being iterated
    for (group, channel), expected_channel_data in expected_data.items():
        channel_obj = tdms_data[group][channel]
        actual_data = channel_obj.data
        assert actual_data.dtype == expected_channel_data.dtype
        assert channel_obj.dtype == expected_channel_data.dtype
        compare_arrays(actual_data, expected_channel_data)
def test_iterate_file_and_groups():
    """ Test iterating over TdmsFile and TdmsGroup uses key values """
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        tdms_file = TdmsFile.read(temp_file.file)
    # Iterating a TdmsFile yields group names; iterating a group
    # yields channel names — both usable as lookup keys
    for group_name in tdms_file:
        group = tdms_file[group_name]
        for channel_name in group:
            channel = group[channel_name]
            compare_arrays(channel.data, expected_data[(group_name, channel_name)])
def test_read_file_passed_as_pathlib_path():
    """ Test reading a file when using a pathlib Path object """
    import pathlib
    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    with test_file.get_tempfile_with_index() as tdms_file_path_str:
        # Wrap the string path in a Path to exercise pathlib support
        tdms_file = TdmsFile.read(pathlib.Path(tdms_file_path_str))
    for (group, channel), expected_channel_data in expected_data.items():
        compare_arrays(tdms_file[group][channel].data, expected_channel_data)
def test_lazily_read_channel_data_with_file_path():
    """Test reading channel data lazily after initialising with a file path

    The temp file is created with delete=False and closed first so that
    TdmsFile.open can reopen it by name (required on Windows); it is
    removed explicitly in the finally block.
    """
    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    temp_file = test_file.get_tempfile(delete=False)
    try:
        temp_file.file.close()
        with TdmsFile.open(temp_file.name) as tdms_file:
            # Renamed loop variable so it no longer shadows the
            # expected_data dict being iterated
            for (group, channel), expected_channel_data in expected_data.items():
                actual_data = tdms_file[group][channel].read_data()
                assert actual_data.dtype == expected_channel_data.dtype
                compare_arrays(actual_data, expected_channel_data)
    finally:
        os.remove(temp_file.name)
def test_indexing_and_iterating_data_chunks():
    """Test streaming chunks of data from a TDMS file and indexing into chunks

    Accumulates per-channel data across all streamed chunks, then
    compares each accumulated list against the expected data.
    """
    test_file, expected_data = scenarios.single_segment_with_two_channels().values
    data_arrays = defaultdict(list)
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for chunk in tdms_file.data_chunks():
                for key in expected_data.keys():
                    group, channel = key
                    channel_chunk = chunk[group][channel]
                    data_arrays[key].extend(list(channel_chunk))
    # Renamed loop variable so it no longer shadows the
    # expected_data dict being iterated
    for key, expected_channel_data in expected_data.items():
        compare_arrays(data_arrays[key], expected_channel_data)
def test_stream_data_chunks(test_file, expected_data):
    """Test streaming chunks of data from a TDMS file

    Each channel chunk's offset must match the amount of data already
    accumulated for that channel, and the concatenated chunks must
    equal the expected data.
    """
    data_arrays = defaultdict(list)
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for chunk in tdms_file.data_chunks():
                for group in chunk.groups():
                    for channel in group.channels():
                        key = (group.name, channel.name)
                        assert channel.offset == len(data_arrays[key])
                        data_arrays[key].extend(channel[:])
    # Renamed loop variable so it no longer shadows the
    # expected_data parameter dict being iterated
    for key, expected_channel_data in expected_data.items():
        compare_arrays(data_arrays[key], expected_channel_data)
def test_timestamp_data(tmp_path):
    """ Test conversion of timestamp channel data to HDF

        HDF doesn't support timestamps natively,
        so these are converted to strings
    """
    test_file, expected_data = scenarios.timestamp_data().values
    tdms_data = test_file.load()
    h5_path = tmp_path / 'h5_timestamp_test.h5'
    h5 = tdms_data.as_hdf(h5_path)
    # Ensure the HDF file is closed even if an assertion below fails,
    # so the temp file isn't left open (previously close was skipped
    # on failure)
    try:
        for (group, channel), expected_values in expected_data.items():
            h5_channel = h5[group][channel]
            # Timestamps are stored as fixed-length byte strings
            assert h5_channel.dtype.kind == 'S'
            expected_strings = np.datetime_as_string(
                expected_values, unit='us', timezone='UTC')
            expected_ascii = [s.encode('ascii') for s in expected_strings]
            compare_arrays(h5_channel[...], expected_ascii)
    finally:
        h5.close()