示例#1
0
    def startReading(pathToData, guiQueue, diagnosisQueue):
        """Stream sensor data from a TDMS file onto the GUI and diagnosis queues.

        Each channel's data is pushed chunk-by-chunk (with a 0.9 s pause per
        chunk) onto ``guiQueue`` and ``diagnosisQueue`` when they are provided,
        guarded by their respective locks.  After finishing a channel the
        global CURRENT_SENSOR counter is advanced; reading stops once
        CURRENT_SENSOR reaches 2, or immediately when "q" is pressed, which
        exits the process via sys.exit().

        NOTE(review): sensorNumber is reset to 0 for every group while
        CURRENT_SENSOR is not -- presumably files contain a single group;
        confirm with the callers.
        """
        with TdmsFile.open(pathToData) as tdms_file:
            for group in tdms_file.groups():
                all_group_channels = group.channels()
                sensorNumber = 0
                for channel in all_group_channels:
                    logging.info("Reading sensor: {}".format(
                        sensorDictionary[sensorNumber]))

                    for chunk in channel.data_chunks():
                        channel_chunk_data = chunk[:]

                        # Fan the chunk out to whichever consumers are active.
                        if guiQueue is not None:
                            with GUILOCK:
                                guiQueue.put(channel_chunk_data)
                        if diagnosisQueue is not None:
                            with DIAGNOSIS_LOCK:
                                diagnosisQueue.put(channel_chunk_data)

                        # Throttle playback; allow the user to abort with "q".
                        time.sleep(0.9)
                        if keyboard.is_pressed("q"):
                            sys.exit()

                    with CURRENT_SENSOR_LOCK:
                        global CURRENT_SENSOR
                        if CURRENT_SENSOR != 2:
                            CURRENT_SENSOR += 1
                            sensorNumber += 1
                        else:
                            # All sensors processed -- stop reading entirely.
                            return
 def _get_tdms_file_property(self, log_file_base_path: str,
                             log_file_name: str,
                             property_name: str) -> Optional[str]:
     """Return the value of a test property stored in a TDMS log file.

     Properties are namespaced under the "Test_properties~" prefix in the
     file; None is returned when the property is absent.
     """
     full_path = Path(log_file_base_path) / log_file_name
     with TdmsFile.open(str(full_path)) as tdms_file:
         return tdms_file.properties.get(f"Test_properties~{property_name}")
示例#3
0
def on_test_file(test_file, lazy_load, func):
    """Apply ``func`` to ``test_file``, either streamed from disk or fully loaded.

    When ``lazy_load`` is true the generated file is written to a temp file
    and opened in streaming mode; otherwise the whole file is loaded into
    memory first.  Returns whatever ``func`` returns.
    """
    if not lazy_load:
        return func(test_file.load())
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file) as tdms_file:
            return func(tdms_file)
示例#4
0
def test_reading_subset_of_data(offset, length):
    """Reading a window of channel data matches a plain slice of the source."""
    full_data = np.arange(0, 100, 1, dtype=np.int32)
    # Break the channel data into unevenly sized segments.
    boundaries = [0, 10, 20, 60, 80, 90, 100]
    segments = [full_data[a:b] for a, b in zip(boundaries, boundaries[1:])]
    hex_segments = [
        "".join(hexlify_value('<i', value) for value in segment)
        for segment in segments
    ]
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(channel_metadata("/'group'/'channel1'", 3,
                                                  5), ), hex_segments[0])
    for hex_chunk in hex_segments[1:]:
        test_file.add_segment(("kTocRawData", ), "", hex_chunk)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            subset = tdms_file['group']['channel1'].read_data(offset, length)
            expected = full_data[offset:offset + length]
            assert len(subset) == len(expected)
            np.testing.assert_equal(subset, expected)
示例#5
0
def load_tdms_video(path, meta_path, frame=None):
    """
    Load a TDMS video and reshape it using dimensions taken from metadata.

    The frame rate, height and duration come from the metadata file; the
    width is derived from the total sample count.

    Parameters
    ----------
    path : pathlib.Path
        File path to TDMS video file.

    meta_path : pathlib.Path
        File path to TDMS file containing metadata about the video.

    frame : int, optional
        Read this one single frame rather than them all.

    Returns
    -------
    tuple
        The reshaped video array ((duration, height, width) for the whole
        video, (height, width) for a single frame) and the frame rate.

    """
    fps, height, duration = _parse_tdms_metadata(meta_path)

    if frame is not None:
        # Stream only the requested frame from disk instead of the whole file.
        with TdmsFile.open(path) as tdms_file:
            first_channel = tdms_file.groups()[0].channels()[0]
            width = int(len(first_channel) / (duration * height))
            frame_size = width * height
            start = frame * frame_size
            frame_data = first_channel[start:start + frame_size]
            return frame_data.reshape(height, width), fps

    # No specific frame requested: load everything and reshape.
    video = read_tdms(path)
    width = int(video.size / (duration * height))
    return video.values.reshape(duration, height, width), fps
示例#6
0
def test_stream_scaled_data_chunks(benchmark):
    """Benchmark streaming channel data when a linear scaling is applied."""
    scale_properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
        "NI_Scale[0]_Scale_Type":
        (0x20, hexlify_value("<I", len("Linear")) + string_hexlify("Linear")),
        "NI_Scale[0]_Linear_Slope": (10, hexlify_value("<d", 2.0)),
        "NI_Scale[0]_Linear_Y_Intercept": (10, hexlify_value("<d", 10.0))
    }
    raw_values = np.arange(0, 1000, dtype=np.dtype('int32'))
    raw_bytes = raw_values.tobytes()
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 100,
                             scale_properties), ),
        raw_bytes,
        binary_data=True)
    # Append nine more raw-data-only segments carrying the same payload.
    for _ in range(0, 9):
        test_file.add_segment(("kTocRawData", ), "", raw_bytes, binary_data=True)

    with TdmsFile.open(test_file.get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel1']
        streamed = benchmark(stream_chunks, channel)

        combined = np.concatenate(streamed)
        # Linear scaling y = 10 + 2*x, repeated across the ten segments.
        np.testing.assert_equal(combined, np.tile(10.0 + 2.0 * raw_values, 10))
示例#7
0
def opened_tdms_file():
    """Yield an open streamed TDMS file plus its expected data for re-use."""
    generated_file, expected = scenarios.chunked_segment().values
    with generated_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Keep both context managers open while the consumer runs.
            yield tdms_file, expected
示例#8
0
def test_indexing_timestamp_channel_with_integer(index):
    """Integer indexing into a timestamp channel returns the expected value."""
    test_file, expected_data = scenarios.timestamp_data().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), channel_values in expected_data.items():
                assert tdms_file[group][channel][index] == channel_values[index]
示例#9
0
def test_lazily_read_channel_data(test_file, expected_data):
    """Test reading channel data lazily.

    Opens the generated file in streaming mode and checks that read_data()
    returns the expected dtype and values for every channel.
    """
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Use a distinct loop variable so the expected_data parameter
            # is not shadowed inside the loop.
            for ((group, channel), expected_channel_data) in expected_data.items():
                actual_data = tdms_file[group][channel].read_data()
                assert actual_data.dtype == expected_channel_data.dtype
                compare_arrays(actual_data, expected_channel_data)
示例#10
0
def test_reading_subset_of_data_for_scenario(test_file, expected_data, offset, length):
    """Test reading a subset of a channel's data for a generated scenario.

    Skips (via assume) offsets beyond the end of every channel.
    """
    assume(any(offset <= len(d) for d in expected_data.values()))
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Use a distinct loop variable so the expected_data parameter
            # is not shadowed inside the loop.
            for ((group, channel), expected_channel_data) in expected_data.items():
                actual_data = tdms_file[group][channel].read_data(offset, length)
                compare_arrays(actual_data, expected_channel_data[offset:offset + length])
示例#11
0
def test_slice_contiguous_data_channel(benchmark):
    """Benchmark slicing a range of data out of a contiguous data file."""
    with TdmsFile.open(get_contiguous_file().get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel3']
        sliced = benchmark(get_slice, channel, 5555, 6555)

        # channel3 holds the constant 3; the 1000-element slice is all threes.
        np.testing.assert_equal(sliced, np.repeat([3], 1000))
示例#12
0
def test_indexing_channel_with_ellipsis():
    """Indexing a channel with ``...`` returns the complete data array."""
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), channel_values in expected_data.items():
                compare_arrays(tdms_file[group][channel][...], channel_values)
示例#13
0
def import_data(file):
    """Read a TDMS file and return a DataFrame of time plus all channel data.

    The first group in the file is used.  A 'Time' column built from the
    group's first channel time track is prepended to the group's channel
    data.

    Parameters
    ----------
    file : str or pathlib.Path
        Path to the TDMS file to read.

    Returns
    -------
    pandas.DataFrame
        Time column merged with every channel of the first group.

    Raises
    ------
    ValueError
        If the file contains no groups or the first group has no channels.
    """
    with TdmsFile.open(file) as f:
        print("opening", file)

        groups = f.groups()
        if not groups:
            raise ValueError("TDMS file contains no groups: {}".format(file))
        selected_group = groups[0]

        channels = selected_group.channels()
        if not channels:
            raise ValueError(
                "TDMS group contains no channels: {}".format(selected_group.name))

        # Build the time axis from the first channel of the selected group.
        # (Previously this used whichever channel a scan loop happened to
        # end on, and the local was named 'time', shadowing the time module.)
        time_track = channels[0].time_track()
        time_df = pd.DataFrame(time_track, columns=['Time'])

        data_df = f[selected_group.name].as_dataframe()

        # Merge the time column with the channel data, aligned on the
        # time index.
        merged_df = pd.concat([time_df, data_df], axis=1).reindex(time_df.index)

    return merged_df
示例#14
0
def test_stream_contiguous_data_channel(benchmark):
    """Benchmark streaming a channel's data chunks from a contiguous file."""
    with TdmsFile.open(get_contiguous_file().get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel3']
        chunks = benchmark(stream_chunks, channel)

        # channel3 holds the constant 3 in each of its 10000 samples.
        combined = np.concatenate(chunks)
        np.testing.assert_equal(combined, np.repeat([3], 10000))
示例#15
0
def test_read_interleaved_data_channel(benchmark):
    """Benchmark reading one channel's data out of an interleaved data file."""
    source = get_interleaved_file().get_bytes_io_file()
    with TdmsFile.open(source) as tdms_file:
        channel = tdms_file['group']['channel3']
        values = benchmark(read_channel_data, channel)

        # channel3 holds the constant 3 in each of its 10000 samples.
        np.testing.assert_equal(values, np.repeat([3], 10000))
示例#16
0
def test_index_contiguous_data_channel(benchmark):
    """Benchmark integer-index reads from a contiguous data file."""
    with TdmsFile.open(get_contiguous_file().get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel3']
        # index_values fills this buffer one element at a time.
        output = np.zeros(10000, dtype=channel.dtype)
        benchmark(index_values, channel, output)

        np.testing.assert_equal(output, np.repeat([3], 10000))
示例#17
0
def test_indexing_channel_with_invalid_integer_raises_error(index):
    """An out-of-range integer index into a channel raises IndexError."""
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), _ in expected_data.items():
                channel_object = tdms_file[group][channel]
                with pytest.raises(IndexError):
                    _ = channel_object[index]
示例#18
0
def test_lazily_read_raw_channel_data_slice():
    """Test reading a slice of raw (unscaled) channel data lazily."""

    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Use a distinct loop variable so the expected_data mapping
            # is not shadowed inside the loop.
            for ((group, channel), expected_channel_data) in expected_data.items():
                actual_data = tdms_file[group][channel].read_data(offset=1, length=2, scaled=False)
                assert actual_data.dtype == expected_channel_data.dtype
                compare_arrays(actual_data, expected_channel_data[1:3])
示例#19
0
def test_multiple_close_after_open():
    """Closing an opened TDMS file more than once must be harmless."""
    generated_file, _ = scenarios.single_segment_with_one_channel().values
    temp_file = generated_file.get_tempfile(delete=False)
    try:
        temp_file.file.close()
        with TdmsFile.open(temp_file.name) as tdms_data:
            tdms_data.close()
        # Close again after the context manager has already exited.
        tdms_data.close()
    finally:
        # The temp file was created with delete=False, so clean up manually.
        os.remove(temp_file.name)
示例#20
0
def test_invalid_length_in_read_data_throws():
    """Reading a data subset with a negative length must raise ValueError."""
    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    group, channel = next(iter(expected_data.keys()))
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            with pytest.raises(ValueError) as exc_info:
                tdms_file[group][channel].read_data(0, -5)
            assert "length must be non-negative" in str(exc_info.value)
示例#21
0
def test_lazily_read_channel_data_with_channel_data_method():
    """Test reading channel data lazily using the channel_data method of TdmsFile
    """
    test_file, expected_data = scenarios.single_segment_with_two_channels(
    ).values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            # Use a distinct loop variable so the expected_data mapping
            # is not shadowed inside the loop.
            for ((group, channel), expected_channel_data) in expected_data.items():
                actual_data = tdms_file.channel_data(group, channel)
                assert actual_data.dtype == expected_channel_data.dtype
                np.testing.assert_almost_equal(actual_data, expected_channel_data)
示例#22
0
def test_stream_channel_data_chunks(test_file, expected_data):
    """Streaming a channel chunk-by-chunk reproduces the full channel data
    and reports a correct running offset on every chunk.
    """
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), channel_values in expected_data.items():
                streamed = []
                for chunk in tdms_file[group][channel].data_chunks():
                    # Offset must equal the number of values seen so far.
                    assert chunk.offset == len(streamed)
                    streamed.extend(chunk[:])
                compare_arrays(streamed, channel_values)
示例#23
0
def test_read_data_after_close_throws():
    """Reading data after open-then-close (without reading) raises RuntimeError."""
    test_file, expected_data = scenarios.single_segment_with_one_channel().values
    group, channel = next(iter(expected_data.keys()))
    with test_file.get_tempfile() as temp_file:
        # Open and immediately close the file without touching any data.
        with TdmsFile.open(temp_file.file) as tdms_file:
            pass
        with pytest.raises(RuntimeError) as exc_info:
            tdms_file[group][channel].read_data()
        assert "Cannot read data after the underlying TDMS reader is closed" in str(exc_info.value)
示例#24
0
def test_indexing_channel_with_invalid_type_raises_error(index):
    """Indexing a channel with an unsupported index type raises TypeError."""
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), _ in expected_data.items():
                channel_object = tdms_file[group][channel]
                with pytest.raises(TypeError) as exc_info:
                    _ = channel_object[index]
                assert "Invalid index type" in str(exc_info.value)
示例#25
0
def test_indexing_channel_with_zero_step_raises_error():
    """Slicing a channel with a zero step size must raise ValueError."""
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), _ in expected_data.items():
                channel_object = tdms_file[group][channel]
                with pytest.raises(ValueError) as exc_info:
                    _ = channel_object[::0]
                assert str(exc_info.value) == "Step size cannot be zero"
 def _verify_tdms_file_has_note(
     log_file_base_path: str, log_file_name: str, expected_note_contents: str
 ) -> None:
     """Assert the TDMS log file holds exactly one user note with the
     expected contents (plus its companion timestamp channel).
     """
     log_file_path = Path(log_file_base_path) / log_file_name
     with TdmsFile.open(str(log_file_path)) as tdms_file:
         info_group = tdms_file["Test Information"]
         notes_channel = info_group["User Notes"]
         assert notes_channel is not None
         notes_time_channel = info_group["User Notes_time"]
         assert notes_time_channel is not None
         assert 1 == len(notes_channel)
         assert expected_note_contents == notes_channel[0]
示例#27
0
def test_iterate_channel_data_in_open_mode():
    """Iterating a channel in streaming mode yields every data value."""
    test_file, expected_data = scenarios.chunked_segment().values

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), channel_values in expected_data.items():
                # list() consumes the channel's iterator value-by-value.
                iterated = list(tdms_file[group][channel])
                compare_arrays(iterated, channel_values)
示例#28
0
def test_indexing_channel_with_integer_and_caching():
    """Integer-index every value of each channel, re-using the open file so
    chunk caching between consecutive lookups is exercised.
    """
    test_file, expected_data = scenarios.chunked_segment().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), channel_values in expected_data.items():
                channel_object = tdms_file[group][channel]
                indexed = [channel_object[i] for i in range(len(channel_object))]
                compare_arrays(indexed, channel_values)
示例#29
0
def test_indexing_scaled_channel_with_integer():
    """Integer indexing into a scaled channel returns the scaled values."""
    test_file, expected_data = scenarios.scaled_data().values
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for (group, channel), channel_values in expected_data.items():
                channel_object = tdms_file[group][channel]
                indexed = [channel_object[i] for i in range(len(channel_object))]
                compare_arrays(indexed, channel_values)
示例#30
0
def test_streaming_to_hdf(tmp_path):
    """ Test conversion of channel data to HDF when streaming data from disk
    """
    test_file, expected_data = scenarios.chunked_segment().values

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            h5_path = tmp_path / 'h5_streaming_data_test.h5'
            h5 = tdms_file.as_hdf(h5_path)

    try:
        # Use a distinct loop variable so the expected_data mapping is not
        # shadowed inside the loop.
        for ((group, channel), expected_channel_data) in expected_data.items():
            h5_channel = h5[group][channel]
            np.testing.assert_almost_equal(h5_channel[...], expected_channel_data)
    finally:
        # Always release the HDF file, even when an assertion fails.
        h5.close()