Example #1
def test_daqmx_debug_logging(caplog):
    """ Test loading a DAQmx file with debug logging enabled
    """
    scaler_metadata = daqmx_scaler_metadata(0, 3, 0)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [2], [scaler_metadata]))
    data = (
        "01 00"
        "02 00"
        "FF FF"
        "FE FF"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    log_manager.set_level(logging.DEBUG)
    _ = test_file.load()

    assert "Reading metadata for object /'Group'/'Channel1' with index header 0x00001269" in caplog.text
    assert "scaler_type=4713" in caplog.text
    assert "scale_id=0" in caplog.text
    assert "data_type=Int16" in caplog.text
Example #2
def test_two_channel_i16():
    """ Test loading a DAQmx file with two channels of I16 data
    """

    scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [4], [scaler_2]))
    data = (
        # Data for segment
        "01 00"
        "11 00"
        "02 00"
        "12 00"
        "03 00"
        "13 00"
        "04 00"
        "14 00"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data_1 = tdms_data["Group"]["Channel1"].raw_data
    assert data_1.dtype == np.int16
    np.testing.assert_array_equal(data_1, [1, 2, 3, 4])

    data_2 = tdms_data["Group"]["Channel2"].raw_data
    assert data_2.dtype == np.int16
    np.testing.assert_array_equal(data_2, [17, 18, 19, 20])
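The raw bytes in this segment interleave the two channels sample by sample (Channel1, Channel2, Channel1, ...). A standalone sketch, using plain numpy rather than npTDMS, of how that little-endian int16 stream splits into the two arrays the test expects:

import numpy as np

# Same hex as the segment above: interleaved int16 samples for two channels
raw = bytes.fromhex("0100 1100 0200 1200 0300 1300 0400 1400")
samples = np.frombuffer(raw, dtype="<i2")

channel_1 = samples[0::2]  # even positions -> Channel1
channel_2 = samples[1::2]  # odd positions  -> Channel2

np.testing.assert_array_equal(channel_1, [1, 2, 3, 4])
np.testing.assert_array_equal(channel_2, [17, 18, 19, 20])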
Example #3
def test_slash_and_space_in_name():
    """Test name like '01/02/03 something'"""

    group_1 = "01/02/03 something"
    channel_1 = "04/05/06 another thing"
    group_2 = "01/02/03 a"
    channel_2 = "04/05/06 b"

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'{0}'/'{1}'".format(group_1, channel_1), 3, 2),
            channel_metadata("/'{0}'/'{1}'".format(group_2, channel_2), 3, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")

    tdms_data = test_file.load()

    assert len(tdms_data.groups()) == 2
    assert len(tdms_data[group_1].channels()) == 1
    assert len(tdms_data[group_2].channels()) == 1
    data_1 = tdms_data[group_1][channel_1].data
    assert len(data_1) == 2
    data_2 = tdms_data[group_2][channel_2].data
    assert len(data_2) == 2
Example #4
def test_floating_point_data_types(tmp_path):
    """ Test conversion of f32 and f64 types to HDF
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'f32'", 9, 4),
            channel_metadata("/'group'/'f64'", 10, 4),
        ),
        hexlify_value('<f', 1) +
        hexlify_value('<f', 2) +
        hexlify_value('<f', 3) +
        hexlify_value('<f', 4) +
        hexlify_value('<d', 1) +
        hexlify_value('<d', 2) +
        hexlify_value('<d', 3) +
        hexlify_value('<d', 4)
    )

    tdms_data = test_file.load()
    h5_path = tmp_path / 'h5_data_test.h5'
    h5 = tdms_data.as_hdf(h5_path)

    for chan, expected_dtype in [
            ('f32', np.dtype('float32')),
            ('f64', np.dtype('float64'))]:
        h5_channel = h5['group'][chan]
        assert h5_channel.dtype == expected_dtype
        np.testing.assert_almost_equal(h5_channel[...], [1.0, 2.0, 3.0, 4.0])
    h5.close()
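hexlify_value is one of the test-suite helpers: it packs a value with the given struct format and returns the bytes as a hex string. A minimal sketch of an equivalent, assuming that behaviour (the real helper may differ in detail):

import binascii
import struct

def hexlify_value(fmt, value):
    # Pack the value with a struct format such as '<f', '<d', 'i' or 'q'
    # and return the bytes as a hex string, e.g. '<f' with 1 -> '0000803f'
    return binascii.hexlify(struct.pack(fmt, value)).decode('ascii')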
Example #5
def test_single_channel_u32():
    """ Test loading a DAQmx file with a single channel of U32 data
    """

    scaler_metadata = daqmx_scaler_metadata(0, 4, 0)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_metadata]))
    data = (
        # Data for segment
        "01 00 00 00"
        "02 00 00 00"
        "FF FF FF FF"
        "FE FF FF FF"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["Channel1"].raw_data

    assert data.dtype == np.uint32
    np.testing.assert_array_equal(data, [1, 2, 2**32 - 1, 2**32 - 2])
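The last two values show why the unsigned read matters: the same bytes that would be -1 and -2 as signed int32 come back as 2**32 - 1 and 2**32 - 2. A standalone numpy check of both interpretations of the hex above:

import numpy as np

raw = bytes.fromhex("01000000 02000000 FFFFFFFF FEFFFFFF")
np.testing.assert_array_equal(
    np.frombuffer(raw, dtype="<u4"),  # unsigned read, as in the test
    [1, 2, 2**32 - 1, 2**32 - 2])
np.testing.assert_array_equal(
    np.frombuffer(raw, dtype="<i4"),  # signed read of the same bytes
    [1, 2, -1, -2])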
Example #6
def test_multiple_scalers_with_same_type():
    """ Test loading a DAQmx file with one channel containing multiple
        format changing scalers of the same type
    """

    scaler_metadata = [
        daqmx_scaler_metadata(0, 3, 0),
        daqmx_scaler_metadata(1, 3, 2)
    ]
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], scaler_metadata))
    data = (
        # Data for segment
        "01 00"
        "11 00"
        "02 00"
        "12 00"
        "03 00"
        "13 00"
        "04 00"
        "14 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()
    channel = tdms_data["Group"]["Channel1"]

    scaler_0_data = channel.raw_scaler_data[0]
    assert scaler_0_data.dtype == np.int16
    np.testing.assert_array_equal(scaler_0_data, [1, 2, 3, 4])

    scaler_1_data = channel.raw_scaler_data[1]
    assert scaler_1_data.dtype == np.int16
    np.testing.assert_array_equal(scaler_1_data, [17, 18, 19, 20])
Example #7
def test_mixed_channel_widths():
    """ Test loading a DAQmx file with channels with different widths
    """

    scaler_1 = daqmx_scaler_metadata(0, 1, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 1)
    scaler_3 = daqmx_scaler_metadata(0, 5, 3)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [7], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [7], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [7], [scaler_3]))
    data = (
        # Data for segment
        "01 11 00 21 00 00 00"
        "02 12 00 22 00 00 00"
        "03 13 00 23 00 00 00"
        "04 14 00 24 00 00 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data_1 = tdms_data["Group"]["Channel1"].raw_data
    assert data_1.dtype == np.int8
    np.testing.assert_array_equal(data_1, [1, 2, 3, 4])

    data_2 = tdms_data["Group"]["Channel2"].raw_data
    assert data_2.dtype == np.int16
    np.testing.assert_array_equal(data_2, [17, 18, 19, 20])

    data_3 = tdms_data["Group"]["Channel3"].raw_data
    assert data_3.dtype == np.int32
    np.testing.assert_array_equal(data_3, [33, 34, 35, 36])
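Each row of raw data in this segment is 7 bytes: an int8 sample for Channel1, an int16 for Channel2 and an int32 for Channel3, all little-endian. A standalone sketch of that layout using a packed numpy structured dtype (illustrative only, not npTDMS's internal reader):

import numpy as np

raw = bytes.fromhex(
    "01 11 00 21 00 00 00"
    "02 12 00 22 00 00 00"
    "03 13 00 23 00 00 00"
    "04 14 00 24 00 00 00")

# Packed record: int8, little-endian int16, little-endian int32 (7 bytes)
row_dtype = np.dtype([("ch1", "i1"), ("ch2", "<i2"), ("ch3", "<i4")])
rows = np.frombuffer(raw, dtype=row_dtype)

np.testing.assert_array_equal(rows["ch1"], [1, 2, 3, 4])
np.testing.assert_array_equal(rows["ch2"], [0x11, 0x12, 0x13, 0x14])  # 17..20
np.testing.assert_array_equal(rows["ch3"], [0x21, 0x22, 0x23, 0x24])  # 33..36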
Example #8
def test_get_len_of_group():
    """Test getting the length of a TdmsGroup
    """
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    assert len(tdms_data['Group']) == 2
Example #9
def test_root_object_paths():
    """Test the group and channel properties for the root object"""
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    obj = tdms_data.object()
    assert obj.group is None
    assert obj.channel is None
Example #10
def test_group_property_read():
    """Test reading property of a group"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    group = tdms_data["Group"]
    assert group.properties["num"] == 10
Example #11
def test_group_object_paths():
    """Test the path and name properties for a group"""
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    group = tdms_data["Group"]
    assert group.path == "/'Group'"
    assert group.name == "Group"
Example #12
def test_multiple_raw_data_buffers_with_different_widths():
    """ DAQmx with raw data buffers with different widths
    """

    scaler_1 = daqmx_scaler_metadata(0, 3, 0, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2, 0)
    scaler_3 = daqmx_scaler_metadata(0, 3, 4, 0)
    scaler_4 = daqmx_scaler_metadata(0, 5, 0, 1)
    scaler_5 = daqmx_scaler_metadata(0, 5, 4, 1)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [6, 8], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [6, 8], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [6, 8], [scaler_3]),
        daqmx_channel_metadata("Channel4", 4, [6, 8], [scaler_4]),
        daqmx_channel_metadata("Channel5", 4, [6, 8], [scaler_5]))
    data = ("01 00"
            "02 00"
            "03 00"
            "04 00"
            "05 00"
            "06 00"
            "07 00"
            "08 00"
            "09 00"
            "0A 00"
            "0B 00"
            "0C 00"
            "0D 00 00 00"
            "0E 00 00 00"
            "0F 00 00 00"
            "10 00 00 00"
            "11 00 00 00"
            "12 00 00 00"
            "13 00 00 00"
            "14 00 00 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data_1 = tdms_data["Group"]["Channel1"].raw_data
    data_2 = tdms_data["Group"]["Channel2"].raw_data
    data_3 = tdms_data["Group"]["Channel3"].raw_data
    data_4 = tdms_data["Group"]["Channel4"].raw_data
    data_5 = tdms_data["Group"]["Channel5"].raw_data

    for data in [data_1, data_2, data_3]:
        assert data.dtype == np.int16
    for data in [data_4, data_5]:
        assert data.dtype == np.int32

    np.testing.assert_array_equal(data_1, [1, 4, 7, 10])
    np.testing.assert_array_equal(data_2, [2, 5, 8, 11])
    np.testing.assert_array_equal(data_3, [3, 6, 9, 12])
    np.testing.assert_array_equal(data_4, [13, 15, 17, 19])
    np.testing.assert_array_equal(data_5, [14, 16, 18, 20])
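Here the segment holds two raw buffers laid out one after the other: the first is four rows of three interleaved int16 channels (6 bytes per row), the second four rows of two interleaved int32 channels (8 bytes per row). A standalone sketch of how those buffers split back into the five expected arrays:

import numpy as np

raw = bytes.fromhex(
    # Buffer 0: 4 rows x 3 interleaved int16 channels (24 bytes)
    "0100 0200 0300 0400 0500 0600 0700 0800 0900 0A00 0B00 0C00"
    # Buffer 1: 4 rows x 2 interleaved int32 channels (32 bytes)
    "0D000000 0E000000 0F000000 10000000 11000000 12000000 13000000 14000000")

buffer_0 = np.frombuffer(raw[:24], dtype="<i2").reshape(4, 3)
buffer_1 = np.frombuffer(raw[24:], dtype="<i4").reshape(4, 2)

np.testing.assert_array_equal(buffer_0[:, 0], [1, 4, 7, 10])     # Channel1
np.testing.assert_array_equal(buffer_0[:, 1], [2, 5, 8, 11])     # Channel2
np.testing.assert_array_equal(buffer_0[:, 2], [3, 6, 9, 12])     # Channel3
np.testing.assert_array_equal(buffer_1[:, 0], [13, 15, 17, 19])  # Channel4
np.testing.assert_array_equal(buffer_1[:, 1], [14, 16, 18, 20])  # Channel5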
Example #13
def test_key_error_getting_invalid_group():
    """Test getting a group that doesn't exist raises a KeyError
    """
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    with pytest.raises(KeyError) as exc_info:
        _ = tdms_data['non-existent group']
    assert 'non-existent group' in str(exc_info.value)
Example #14
def test_get_object_from_group():
    """Test passing a TdmsGroup to object returns the group"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_file = test_file.load()

    groups = tdms_file.groups()
    assert tdms_file.object(groups[0]) is groups[0]
    assert tdms_file.object(groups[0].name) is groups[0]
Example #15
def test_channel_object_paths():
    """Test the path and name properties for a channel"""
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    channel = tdms_data["Group"]["Channel1"]
    assert channel.path == "/'Group'/'Channel1'"
    assert channel.name == "Channel1"
    assert channel.group_name == "Group"
Example #16
def test_file_properties():
    """Test reading properties of the file (root object)"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())

    tdms_file = test_file.load()

    file_props = tdms_file.properties
    assert file_props['num'] == 15
Example #17
def test_file_as_dataframe_without_time():
    """Converting file to dataframe with time index should raise when
    time properties aren't present"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    with pytest.raises(KeyError):
        tdms_data.as_dataframe(time_index=True)
Example #18
def test_file_as_dataframe_with_absolute_time():
    """Convert file to Pandas dataframe with absolute time index"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data.as_dataframe(time_index=True, absolute_time=True)

    expected_start = datetime(2015, 9, 8, 10, 5, 49)
    assert (df.index == expected_start)[0]
Example #19
def test_channel_object_paths():
    """Test the group and channel properties for a group"""
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    obj = tdms_data["Group"]["Channel1"]
    assert obj.path == "/'Group'/'Channel1'"
    assert obj.name == "Channel1"
    assert obj.group == "Group"
    assert obj.channel == "Channel1"
Example #20
def test_channel_as_dataframe_with_time():
    """Convert a channel to dataframe with a time index"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data["Group"]["Channel2"].as_dataframe(time_index=True)

    assert len(df.index) == 2
    assert_within_tol(df.index[0], 2.0)
    assert_within_tol(df.index[1], 2.1)
Example #21
def test_channel_as_dataframe():
    """Convert a channel to dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data["Group"]["Channel2"].as_dataframe()
    assert len(df) == 2
    assert len(df.keys()) == 1
    assert "/'Group'/'Channel2'" in df.keys()
    assert (df["/'Group'/'Channel2'"] == [3, 4]).all()
Example #22
def test_file_as_dataframe_with_time():
    """Test converting file to Pandas dataframe with a time index"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data.as_dataframe(time_index=True)

    assert len(df.index) == 2
    assert_within_tol(df.index[0], 2.0)
    assert_within_tol(df.index[1], 2.1)
Example #23
def test_object_repr():
    """Test getting object representations of groups and channels
    """
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    group = tdms_data["Group"]
    assert repr(group) == "<TdmsGroup with path /'Group'>"

    channel = group["Channel1"]
    assert repr(channel) == "<TdmsChannel with path /'Group'/'Channel1'>"
Example #24
def test_unicode_string_data(tmp_path):
    """ Test HDF5 conversion for string datatype with non-ASCII data
    """
    strings = ["Hello, \u4E16\u754C", "\U0001F600"]
    sizes = [len(s.encode('utf-8')) for s in strings]

    test_file = GeneratedFile()
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList")
    metadata = (
        # Number of objects
        "01 00 00 00"
        # Length of the object path
        "11 00 00 00")
    metadata += string_hexlify("/'Group'/'String'")
    metadata += (
        # Length of index information
        "1C 00 00 00"
        # Raw data data type
        "20 00 00 00"
        # Dimension
        "01 00 00 00"
        # Number of raw data values
        "02 00 00 00"
        "00 00 00 00" +
        # Number of bytes in data, including index
        hexlify_value('q', sum(sizes) + 4 * len(sizes)) +
        # Number of properties (0)
        "00 00 00 00")
    data = ""
    offset = 0
    for size in sizes:
        # Index gives end positions of strings:
        offset += size
        data += hexlify_value('i', offset)
    for string in strings:
        data += string_hexlify(string)
    test_file.add_segment(toc, metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["String"].data
    assert len(data) == len(strings)
    for expected, read in zip(strings, data):
        assert expected == read

    h5_path = tmp_path / 'h5_unicode_strings_test.h5'
    h5 = tdms_data.as_hdf(h5_path)
    h5_strings = h5['Group']['String']
    assert h5_strings.dtype.kind == 'O'
    assert h5_strings.shape[0] == len(strings)
    for expected, read in zip(strings, h5_strings[...]):
        assert expected == read
    h5.close()
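TDMS string raw data is stored as a table of int32 end offsets (little-endian in these examples) followed by the concatenated UTF-8 bytes, which is why the metadata above records sum(sizes) + 4 * len(sizes) bytes. A standalone sketch of that encoding and its round trip (illustrative only, not npTDMS's own reader or writer):

import struct

strings = ["Hello, \u4E16\u754C", "\U0001F600"]
encoded = [s.encode("utf-8") for s in strings]

# Build the raw value block: cumulative end offsets, then the string bytes
offsets, total = [], 0
for chunk in encoded:
    total += len(chunk)
    offsets.append(total)
raw = b"".join(struct.pack("<i", end) for end in offsets) + b"".join(encoded)

# Decode it again: read the offsets, then slice the concatenated bytes
ends = [struct.unpack_from("<i", raw, 4 * i)[0] for i in range(len(strings))]
blob = raw[4 * len(strings):]
decoded = [blob[start:end].decode("utf-8")
           for start, end in zip([0] + ends[:-1], ends)]
assert decoded == strings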
Example #25
def test_get_objects():
    """Test reading data"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_file = test_file.load()

    objects = tdms_file.objects
    assert len(objects) == 4
    assert "/" in objects.keys()
    assert "/'Group'" in objects.keys()
    assert "/'Group'/'Channel1'" in objects.keys()
    assert "/'Group'/'Channel2'" in objects.keys()
Example #26
def test_group_as_dataframe():
    """Convert a group to dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data["Group"].as_dataframe()
    assert len(df) == 2
    assert len(df.keys()) == 2
    assert "Channel1" in df.keys()
    assert "Channel2" in df.keys()
    assert (df["Channel1"] == [1, 2]).all()
    assert (df["Channel2"] == [3, 4]).all()
Example #27
def test_file_as_dataframe():
    """Test converting file to Pandas dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data.as_dataframe()

    assert len(df) == 2
    assert "/'Group'/'Channel1'" in df.keys()
    assert "/'Group'/'Channel2'" in df.keys()

    assert (df["/'Group'/'Channel1'"] == [1, 2]).all()
Example #28
def test_as_hdf_string(tmp_path):
    """ Test HDF5 conversion for string datatype
    """
    strings = ["abc123", "?<>~`!@#$%^&*()-=_+,.;'[]:{}|"]

    test_file = GeneratedFile()
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList")
    metadata = (
        # Number of objects
        "01 00 00 00"
        # Length of the object path
        "11 00 00 00")
    metadata += string_hexlify("/'Group'/'String'")
    metadata += (
        # Length of index information
        "1C 00 00 00"
        # Raw data data type
        "20 00 00 00"
        # Dimension
        "01 00 00 00"
        # Number of raw data values
        "02 00 00 00"
        "00 00 00 00"
        # Number of bytes in data
        "2B 00 00 00"
        "00 00 00 00"
        # Number of properties (0)
        "00 00 00 00")
    data = (
        "06 00 00 00"  # index to after first string
        "24 00 00 00"  # index to after second string
    )
    for string in strings:
        data += string_hexlify(string)
    test_file.add_segment(toc, metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["String"].data
    assert len(data) == len(strings)
    for expected, read in zip(strings, data):
        assert expected == read

    h5_path = tmp_path / 'h5_strings_test.h5'
    h5 = tdms_data.as_hdf(h5_path)
    h5_strings = h5['Group']['String']
    assert h5_strings.dtype.kind == 'O'
    assert h5_strings.shape[0] == len(strings)
    for expected, read in zip(strings, h5_strings[...]):
        assert expected == read
    h5.close()
Example #29
def test_time_track():
    """Add a time track to waveform data"""

    test_file = GeneratedFile()
    (toc, metadata, data) = basic_segment()
    test_file.add_segment(toc, metadata, data)
    tdms_data = test_file.load()

    channel = tdms_data["Group"]["Channel2"]
    time = channel.time_track()
    assert len(time) == len(channel.data)
    epsilon = 1.0E-15
    assert abs(time[0]) < epsilon
    assert abs(time[1] - 0.1) < epsilon
Example #30
def test_channel_as_dataframe_without_time():
    """Converting channel to dataframe should work correctly"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    df = tdms_data["Group"]["Channel2"].as_dataframe()

    assert len(df.index) == 2
    assert len(df.values) == 2
    assert_within_tol(df.index[0], 0)
    assert_within_tol(df.index[1], 1)
    assert_within_tol(df.values[0], 3.0)
    assert_within_tol(df.values[1], 4.0)
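assert_within_tol, used in several of the dataframe examples, is another small test-suite helper. A minimal sketch of an equivalent absolute-tolerance check (the name and default tolerance are assumptions):

def assert_within_tol(a, b, tol=1.0e-10):
    # Absolute-difference check used in place of exact float equality
    assert abs(a - b) < tol, "%r and %r differ by more than %r" % (a, b, tol)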