Example #1
def test_channel_as_dataframe(lazy_load):
    """Convert a channel to dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())

    df = on_test_file(
        test_file, lazy_load, lambda tdms_data: tdms_data["Group"]["Channel2"].as_dataframe())

    assert len(df) == 2
    assert len(df.keys()) == 1
    assert "/'Group'/'Channel2'" in df.keys()
    assert (df["/'Group'/'Channel2'"] == [3, 4]).all()
Example #2
def test_file_as_dataframe(lazy_load):
    """Test converting file to Pandas dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())

    df = on_test_file(test_file, lazy_load, lambda tdms_data: tdms_data.as_dataframe())

    assert len(df) == 2
    assert "/'Group'/'Channel1'" in df.keys()
    assert "/'Group'/'Channel2'" in df.keys()

    assert (df["/'Group'/'Channel1'"] == [1, 2]).all()
Example #3
def test_get_objects():
    """Test reading data"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_file = test_file.load()

    objects = tdms_file.objects
    assert len(objects) == 4
    assert "/" in objects.keys()
    assert "/'Group'" in objects.keys()
    assert "/'Group'/'Channel1'" in objects.keys()
    assert "/'Group'/'Channel2'" in objects.keys()
Example #4
def test_group_as_dataframe(lazy_load):
    """Convert a group to dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())

    df = on_test_file(test_file, lazy_load, lambda tdms_data: tdms_data["Group"].as_dataframe())

    assert len(df) == 2
    assert len(df.keys()) == 2
    assert "Channel1" in df.keys()
    assert "Channel2" in df.keys()
    assert (df["Channel1"] == [1, 2]).all()
    assert (df["Channel2"] == [3, 4]).all()
Example #5
def test_time_track():
    """Add a time track to waveform data"""

    test_file = GeneratedFile()
    (toc, metadata, data) = basic_segment()
    test_file.add_segment(toc, metadata, data)
    tdms_data = test_file.load()

    channel = tdms_data["Group"]["Channel2"]
    time = channel.time_track()
    assert len(time) == len(channel.data)
    epsilon = 1.0E-15
    assert abs(time[0]) < epsilon
    assert abs(time[1] - 0.1) < epsilon
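
time_track() builds the waveform time axis from the channel's wf_start_offset and wf_increment properties. A minimal sketch of the arithmetic these assertions rely on, assuming basic_segment sets wf_start_offset to 0.0 and wf_increment to 0.1:

import numpy as np

def expected_time_track(num_samples, wf_start_offset=0.0, wf_increment=0.1):
    # Waveform time axis: t[i] = wf_start_offset + i * wf_increment
    return wf_start_offset + wf_increment * np.arange(num_samples)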
Example #6
def test_as_hdf_string(tmp_path):
    """ Test HDF5 conversion for string datatype
    """
    strings = ["abc123", "?<>~`!@#$%^&*()-=_+,.;'[]:{}|"]

    test_file = GeneratedFile()
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList")
    metadata = (
        # Number of objects
        "01 00 00 00"
        # Length of the object path
        "11 00 00 00")
    metadata += string_hexlify("/'Group'/'String'")
    metadata += (
        # Length of index information
        "1C 00 00 00"
        # Raw data data type
        "20 00 00 00"
        # Dimension
        "01 00 00 00"
        # Number of raw data values
        "02 00 00 00"
        "00 00 00 00"
        # Number of bytes in data
        "2B 00 00 00"
        "00 00 00 00"
        # Number of properties (0)
        "00 00 00 00")
    data = (
        "06 00 00 00"  # index to after first string
        "23 00 00 00"  # index to after second string (6 + 29 bytes)
    )
    for string in strings:
        data += string_hexlify(string)
    test_file.add_segment(toc, metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["String"].data
    assert len(data) == len(strings)
    for expected, read in zip(strings, data):
        assert expected == read

    h5_path = tmp_path / 'h5_strings_test.h5'
    h5 = tdms_data.as_hdf(h5_path)
    h5_strings = h5['Group']['String']
    assert h5_strings.dtype.kind == 'O'
    assert h5_strings.shape[0] == len(strings)
    for expected, read in zip(strings, h5_strings[...]):
        assert expected == read
    h5.close()
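
The raw string data above follows the TDMS string layout: a table of uint32 end offsets, then the concatenated UTF-8 string bytes. A minimal decoder sketch for this layout (illustrative only, not part of the npTDMS API):

import struct

def decode_tdms_strings(raw_data, count):
    # Each offset marks where a string ends, relative to the start of
    # the string bytes that follow the offset table
    ends = struct.unpack("<%dI" % count, raw_data[:4 * count])
    payload = raw_data[4 * count:]
    return [payload[start:end].decode("utf-8")
            for start, end in zip((0,) + ends[:-1], ends)]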
Example #7
def single_segment_with_one_channel():
    test_file = GeneratedFile()
    test_file.add_segment(("kTocMetaData", "kTocRawData", "kTocNewObjList"),
                          segment_objects_metadata(
                              channel_metadata("/'group'/'channel1'",
                                               TDS_TYPE_INT32, 4), ),
                          "01 00 00 00"
                          "02 00 00 00"
                          "03 00 00 00"
                          "04 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 3, 4], dtype=np.int32),
    }
    return test_file, expected_data
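
The hex strings above encode little-endian int32 values, which GeneratedFile is assumed to write verbatim as the segment's raw data. A quick check of the encoding:

import struct

raw = bytes.fromhex("01 00 00 00" "02 00 00 00" "03 00 00 00" "04 00 00 00")
assert struct.unpack("<4i", raw) == (1, 2, 3, 4)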
Example #8
def test_group_as_dataframe():
    """Convert a group to dataframe"""

    test_file = GeneratedFile()
    test_file.add_segment(*timed_segment())
    tdms_data = test_file.load()

    df = tdms_data["Group"].as_dataframe()
    assert len(df) == 2
    assert len(df.keys()) == 2
    assert "Channel1" in df.keys()
    assert "Channel2" in df.keys()
    assert (df["Channel1"] == [1, 2]).all()
    assert (df["Channel2"] == [3, 4]).all()
Example #9
def test_stream_data_chunks():
    """Test streaming chunks of DAQmx data from a TDMS file
    """
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
    }
    scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_1], properties),
        daqmx_channel_metadata("Channel2", 4, [4], [scaler_2], properties))
    data = (
        # Data for segment
        "01 00"
        "11 00"
        "02 00"
        "12 00"
        "03 00"
        "13 00"
        "04 00"
        "14 00"
        "05 00"
        "15 00"
        "06 00"
        "16 00"
        "07 00"
        "17 00"
        "08 00"
        "18 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    data_arrays = defaultdict(list)
    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            for chunk in tdms_file.data_chunks():
                for group in chunk.groups():
                    for channel in group.channels():
                        key = (group.name, channel.name)
                        assert channel.offset == len(data_arrays[key])
                        data_arrays[key].extend(channel[:])

    expected_channel_data = {
        ("Group", "Channel1"): [1, 2, 3, 4, 5, 6, 7, 8],
        ("Group", "Channel2"): [17, 18, 19, 20, 21, 22, 23, 24],
    }
    for ((group, channel), expected_data) in expected_channel_data.items():
        actual_data = data_arrays[(group, channel)]
        np.testing.assert_equal(actual_data, expected_data)
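
The segment's raw buffer interleaves the two channels as int16 pairs (raw buffer width 4 bytes, with the scalers at byte offsets 0 and 2), so de-interleaving with NumPy reproduces the expected values. A minimal sketch:

import numpy as np

raw = np.frombuffer(bytes.fromhex(
    "01 00 11 00 02 00 12 00 03 00 13 00 04 00 14 00"
    "05 00 15 00 06 00 16 00 07 00 17 00 08 00 18 00"), dtype="<i2")
channel_1, channel_2 = raw[0::2], raw[1::2]
assert list(channel_1) == [1, 2, 3, 4, 5, 6, 7, 8]
assert list(channel_2) == [17, 18, 19, 20, 21, 22, 23, 24]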
Example #10
def test_memmapped_read():
    """Test reading data into memmapped arrays"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load(memmap_dir=tempfile.gettempdir())

    data = tdms_data["Group"]["Channel1"].data
    assert len(data) == 2
    assert data[0] == 1
    assert data[1] == 2
    data = tdms_data["Group"]["Channel2"].data
    assert len(data) == 2
    assert data[0] == 3
    assert data[1] == 4
Example #11
def test_tdmsinfo_with_properties(capsys):
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    temp_file = test_file.get_tempfile(delete=False)
    try:
        temp_file.file.close()
        with patch.object(sys, 'argv',
                          ['tdmsinfo.py', temp_file.name, '--properties']):
            tdmsinfo.main()
            captured = capsys.readouterr()
            assert "/'Group'/'Channel1'" in captured.out
            assert "wf_start_offset: 0.0" in captured.out
            assert "length: 2" in captured.out
    finally:
        os.remove(temp_file.name)
Example #12
def test_channel_as_dataframe_without_time():
    """Converting channel to dataframe should work correctly"""

    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    df = tdms_data["Group"]["Channel2"].as_dataframe()

    assert len(df.index) == 2
    assert len(df.values) == 2
    assert_within_tol(df.index[0], 0)
    assert_within_tol(df.index[1], 1)
    assert_within_tol(df.values[0], 3.0)
    assert_within_tol(df.values[1], 4.0)
Example #13
def test_multiple_raw_data_buffers_with_scalers_split_across_buffers():
    """ DAQmx with scalers split across different raw data buffers
    """

    scaler_1 = daqmx_scaler_metadata(0, 3, 0, 0)
    scaler_2 = daqmx_scaler_metadata(1, 3, 0, 1)
    scaler_3 = daqmx_scaler_metadata(0, 3, 2, 0)
    scaler_4 = daqmx_scaler_metadata(1, 3, 2, 1)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4, 4], [scaler_1, scaler_2]),
        daqmx_channel_metadata("Channel2", 4, [4, 4], [scaler_3, scaler_4]))
    data = ("01 00"
            "02 00"
            "03 00"
            "04 00"
            "05 00"
            "06 00"
            "07 00"
            "08 00"
            "09 00"
            "0A 00"
            "0B 00"
            "0C 00"
            "0D 00"
            "0E 00"
            "0F 00"
            "10 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    channel_1 = tdms_data["Group"]["Channel1"]
    channel_2 = tdms_data["Group"]["Channel2"]

    scaler_data_1 = channel_1.raw_scaler_data[0]
    scaler_data_2 = channel_1.raw_scaler_data[1]
    scaler_data_3 = channel_2.raw_scaler_data[0]
    scaler_data_4 = channel_2.raw_scaler_data[1]

    for data in [scaler_data_1, scaler_data_2, scaler_data_3, scaler_data_4]:
        assert data.dtype == np.int16

    np.testing.assert_array_equal(scaler_data_1, [1, 3, 5, 7])
    np.testing.assert_array_equal(scaler_data_2, [9, 11, 13, 15])
    np.testing.assert_array_equal(scaler_data_3, [2, 4, 6, 8])
    np.testing.assert_array_equal(scaler_data_4, [10, 12, 14, 16])
Example #14
def test_multiple_raw_data_buffers_with_different_widths():
    """ DAQmx with raw data buffers with different widths
    """

    scaler_1 = daqmx_scaler_metadata(0, 3, 0, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2, 0)
    scaler_3 = daqmx_scaler_metadata(0, 3, 4, 0)
    scaler_4 = daqmx_scaler_metadata(0, 5, 0, 1)
    scaler_5 = daqmx_scaler_metadata(0, 5, 4, 1)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [6, 8], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [6, 8], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [6, 8], [scaler_3]),
        daqmx_channel_metadata("Channel4", 4, [6, 8], [scaler_4]),
        daqmx_channel_metadata("Channel5", 4, [6, 8], [scaler_5]))
    data = (
        "01 00" "02 00" "03 00"
        "04 00" "05 00" "06 00"
        "07 00" "08 00" "09 00"
        "0A 00" "0B 00" "0C 00"
        "0D 00 00 00" "0E 00 00 00"
        "0F 00 00 00" "10 00 00 00"
        "11 00 00 00" "12 00 00 00"
        "13 00 00 00" "14 00 00 00"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data_1 = tdms_data["Group"]["Channel1"].raw_data
    data_2 = tdms_data["Group"]["Channel2"].raw_data
    data_3 = tdms_data["Group"]["Channel3"].raw_data
    data_4 = tdms_data["Group"]["Channel4"].raw_data
    data_5 = tdms_data["Group"]["Channel5"].raw_data

    for data in [data_1, data_2, data_3]:
        assert data.dtype == np.int16
    for data in [data_4, data_5]:
        assert data.dtype == np.int32

    np.testing.assert_array_equal(data_1, [1, 4, 7, 10])
    np.testing.assert_array_equal(data_2, [2, 5, 8, 11])
    np.testing.assert_array_equal(data_3, [3, 6, 9, 12])
    np.testing.assert_array_equal(data_4, [13, 15, 17, 19])
    np.testing.assert_array_equal(data_5, [14, 16, 18, 20])
Example #15
def bool_data():
    """ Test reading a file with boolean valued data
    """
    expected_channel_data = np.array([False, True, False, True],
                                     dtype=np.dtype('bool'))

    test_file = GeneratedFile()
    test_file.add_segment(("kTocMetaData", "kTocRawData", "kTocNewObjList"),
                          segment_objects_metadata(
                              channel_metadata("/'group'/'bool_channel'",
                                               TDS_TYPE_BOOL, 2), ),
                          "00 01 00 01")
    expected_data = {
        ('group', 'bool_channel'): expected_channel_data,
    }
    return test_file, expected_data
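
Each boolean sample occupies one byte in the raw data ("00 01 00 01" above). A quick check of the decoding:

import numpy as np

raw = bytes.fromhex("00 01 00 01")
assert (np.frombuffer(raw, dtype=np.uint8).astype(bool)
        == [False, True, False, True]).all()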
Example #16
def test_multiple_raw_data_buffers():
    """ Test loading a DAQmx file with multiple raw data buffers
    """

    scaler_1 = daqmx_scaler_metadata(0, 3, 0, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2, 0)
    scaler_3 = daqmx_scaler_metadata(0, 3, 0, 1)
    scaler_4 = daqmx_scaler_metadata(0, 3, 2, 1)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4, 4], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [4, 4], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [4, 4], [scaler_3]),
        daqmx_channel_metadata("Channel4", 4, [4, 4], [scaler_4]))
    data = ("01 00"
            "02 00"
            "03 00"
            "04 00"
            "05 00"
            "06 00"
            "07 00"
            "08 00"
            "09 00"
            "0A 00"
            "0B 00"
            "0C 00"
            "0D 00"
            "0E 00"
            "0F 00"
            "10 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data_1 = tdms_data["Group"]["Channel1"].raw_data
    data_2 = tdms_data["Group"]["Channel2"].raw_data
    data_3 = tdms_data["Group"]["Channel3"].raw_data
    data_4 = tdms_data["Group"]["Channel4"].raw_data

    for data in [data_1, data_2, data_3, data_4]:
        assert data.dtype == np.int16

    np.testing.assert_array_equal(data_1, [1, 3, 5, 7])
    np.testing.assert_array_equal(data_2, [2, 4, 6, 8])
    np.testing.assert_array_equal(data_3, [9, 11, 13, 15])
    np.testing.assert_array_equal(data_4, [10, 12, 14, 16])
Example #17
def test_stream_scaled_data_chunks(benchmark):
    """ Benchmark streaming channel data when the data is scaled
    """
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
        "NI_Scale[0]_Scale_Type":
        (0x20, hexlify_value("<I", len("Linear")) + string_hexlify("Linear")),
        "NI_Scale[0]_Linear_Slope": (10, hexlify_value("<d", 2.0)),
        "NI_Scale[0]_Linear_Y_Intercept": (10, hexlify_value("<d", 10.0))
    }
    test_file = GeneratedFile()
    data_array = np.arange(0, 1000, dtype=np.dtype('int32'))
    data = data_array.tobytes()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 100,
                             properties), ),
        data,
        binary_data=True)
    for _ in range(0, 9):
        test_file.add_segment(("kTocRawData", ), "", data, binary_data=True)

    with TdmsFile.open(test_file.get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel1']
        channel_data = benchmark(stream_chunks, channel)

        channel_data = np.concatenate(channel_data)
        expected_data = np.tile(10.0 + 2.0 * data_array, 10)
        np.testing.assert_equal(channel_data, expected_data)
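
stream_chunks is a test helper; a minimal version consistent with how it is used here, built on channel.data_chunks(), npTDMS's API for streaming a single channel:

def stream_chunks(channel):
    # Collect the data from each streamed chunk of a single channel
    return [chunk[:] for chunk in channel.data_chunks()]

The expected values follow from the linear scale defined by the NI_Scale[0] properties: scaled = slope * raw + intercept = 2.0 * raw + 10.0.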
Example #18
def test_reading_subset_of_data(offset, length):
    channel_data = np.arange(0, 100, 1, dtype=np.int32)
    # Split data into different sized segments
    segment_data = [
        channel_data[0:10],
        channel_data[10:20],
        channel_data[20:60],
        channel_data[60:80],
        channel_data[80:90],
        channel_data[90:100],
    ]
    hex_segment_data = [
        "".join(hexlify_value('<i', x) for x in data) for data in segment_data
    ]
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 5),
        ),
        hex_segment_data[0])
    for hex_data in hex_segment_data[1:]:
        test_file.add_segment(("kTocRawData", ), "", hex_data)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            channel_subset = tdms_file['group']['channel1'].read_data(
                offset, length)
            expected_data = channel_data[offset:offset + length]
            assert len(channel_subset) == len(expected_data)
            np.testing.assert_equal(channel_subset, expected_data)
Example #19
def test_single_quote_in_name():
    group_1 = "group''s name"
    channel_1 = "channel''s name"

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'{0}'/'{1}'".format(group_1, channel_1), 3, 2),
        ),
        "01 00 00 00" "02 00 00 00")

    tdms_data = test_file.load()

    assert len(tdms_data.groups()) == 1
    assert len(tdms_data["group's name"].channels()) == 1
    data_1 = tdms_data["group's name"]["channel's name"].data
    assert len(data_1) == 2
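
Single quotes inside TDMS object path components are escaped by doubling them, which is why the doubled quotes in the path above read back as single quotes. An illustrative helper (hypothetical, not part of the test utilities):

def escape_name(name):
    # Double any single quotes to embed a name in a TDMS object path
    return "'{0}'".format(name.replace("'", "''"))

assert escape_name("group's name") == "'group''s name'"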
Example #20
def test_single_channel_i16():
    """ Test loading a DAQmx file with a single channel of I16 data
    """

    scaler_metadata = daqmx_scaler_metadata(0, 3, 0)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [2], [scaler_metadata]))
    data = ("01 00" "02 00" "FF FF" "FE FF")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["Channel1"].raw_data

    assert data.dtype == np.int16
    np.testing.assert_array_equal(data, [1, 2, -1, -2])
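
The raw bytes are little-endian two's-complement int16 values, so "FF FF" reads back as -1. A quick check with struct:

import struct

raw = bytes.fromhex("01 00" "02 00" "FF FF" "FE FF")
assert struct.unpack("<4h", raw) == (1, 2, -1, -2)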
Example #21
def test_lazily_reading_a_subset_of_raw_channel_data():
    """ Test loading a subset of raw scaler channel data from a DAQmx file
    """

    # Single scale which is just the raw DAQmx scaler data
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
    }
    scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_1], properties),
        daqmx_channel_metadata("Channel2", 4, [4], [scaler_2], properties))
    data = (
        # Data for segment
        "01 00"
        "11 00"
        "02 00"
        "12 00"
        "03 00"
        "13 00"
        "04 00"
        "14 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            data_1 = tdms_file["Group"]["Channel1"].read_data(1,
                                                              2,
                                                              scaled=False)
            assert len(data_1) == 1
            assert data_1[0].dtype == np.int16
            np.testing.assert_array_equal(data_1[0], [2, 3])

            data_2 = tdms_file["Group"]["Channel2"].read_data(1,
                                                              2,
                                                              scaled=False)
            assert len(data_2) == 1
            assert data_2[0].dtype == np.int16
            np.testing.assert_array_equal(data_2[0], [18, 19])
Example #22
def test_int_data_types(tmp_path):
    """ Test conversion of signed and unsigned integer types to HDF
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'i8'", 1, 4),
            channel_metadata("/'group'/'u8'", 5, 4),
            channel_metadata("/'group'/'i16'", 2, 4),
            channel_metadata("/'group'/'u16'", 6, 4),
            channel_metadata("/'group'/'i32'", 3, 4),
            channel_metadata("/'group'/'u32'", 7, 4),
            channel_metadata("/'group'/'i64'", 4, 4),
            channel_metadata("/'group'/'u64'", 8, 4),
        ),
        "01 02 03 04"
        "01 02 03 04"
        "01 00 02 00 03 00 04 00"
        "01 00 02 00 03 00 04 00"
        "01 00 00 00 02 00 00 00 03 00 00 00 04 00 00 00"
        "01 00 00 00 02 00 00 00 03 00 00 00 04 00 00 00"
        "01 00 00 00 00 00 00 00 02 00 00 00 00 00 00 00 03 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00"
        "01 00 00 00 00 00 00 00 02 00 00 00 00 00 00 00 03 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00"
    )

    tdms_data = test_file.load()
    h5_path = tmp_path / 'h5_data_test.h5'
    h5 = tdms_data.as_hdf(h5_path)

    for chan, expected_dtype in [
            ('i8', np.dtype('int8')),
            ('u8', np.dtype('uint8')),
            ('i16', np.dtype('int16')),
            ('u16', np.dtype('uint16')),
            ('i32', np.dtype('int32')),
            ('u32', np.dtype('uint32')),
            ('i64', np.dtype('int64')),
            ('u64', np.dtype('uint64'))]:
        h5_channel = h5['group'][chan]
        assert h5_channel.dtype == expected_dtype
        np.testing.assert_almost_equal(h5_channel[...], [1, 2, 3, 4])
    h5.close()
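
For reference, the TDMS data type codes passed to channel_metadata above map to the integer dtypes this test expects (a summary read off the test itself, not one of its helpers):

import numpy as np

TDS_INT_TYPES = {
    1: np.dtype('int8'),   2: np.dtype('int16'),
    3: np.dtype('int32'),  4: np.dtype('int64'),
    5: np.dtype('uint8'),  6: np.dtype('uint16'),
    7: np.dtype('uint32'), 8: np.dtype('uint64'),
}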
Example #23
def test_daqmx_debug_logging(caplog):
    """ Test loading a DAQmx file with debug logging enabled
    """
    scaler_metadata = daqmx_scaler_metadata(0, 3, 0)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [2], [scaler_metadata]))
    data = ("01 00" "02 00" "FF FF" "FE FF")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    log_manager.set_level(logging.DEBUG)
    _ = test_file.load()

    assert "Reading metadata for object /'Group'/'Channel1' with index header 0x00001269" in caplog.text
    assert "scaler_type=4713" in caplog.text
    assert "scale_id=0" in caplog.text
    assert "data_type=Int16" in caplog.text
Example #24
def test_string_data():
    """Test reading a file with string data"""

    strings = ["abcdefg", "qwertyuiop"]

    test_file = GeneratedFile()
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList")
    metadata = (
        # Number of objects
        "01 00 00 00"
        # Length of the object path
        "18 00 00 00")
    metadata += string_hexlify("/'Group'/'StringChannel'")
    metadata += (
        # Length of index information
        "1C 00 00 00"
        # Raw data data type
        "20 00 00 00"
        # Dimension
        "01 00 00 00"
        # Number of raw data values
        "02 00 00 00"
        "00 00 00 00"
        # Number of bytes in data
        "19 00 00 00"
        "00 00 00 00"
        # Number of properties (0)
        "00 00 00 00")
    data = (
        "07 00 00 00"  # index to after first string
        "11 00 00 00"  # index to after second string
    )
    for string in strings:
        data += string_hexlify(string)
    test_file.add_segment(toc, metadata, data)
    tdms_data = test_file.load()

    channel = tdms_data["Group"]["StringChannel"]
    assert len(channel.data) == len(strings)
    assert channel.data.dtype == channel.dtype
    for expected, read in zip(strings, channel.data):
        assert expected == read
Example #25
def test_read_raw_timestamp_properties():
    """ Test reading timestamp properties as a raw TDMS timestamp
    """
    test_file = GeneratedFile()
    second_fractions = 1234567890 * 10**10
    properties = {
        "wf_start_time": (0x44, hexlify_value("<Q", second_fractions) +
                          hexlify_value("<q", 3524551547))
    }
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2, properties),
        ),
        "01 00 00 00" "02 00 00 00")

    with test_file.get_tempfile() as temp_file:
        tdms_data = TdmsFile.read(temp_file.file, raw_timestamps=True)
        start_time = tdms_data['group']['channel1'].properties['wf_start_time']
        assert start_time.seconds == 3524551547
        assert start_time.second_fractions == second_fractions
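
A raw TDMS timestamp is an int64 count of seconds since the LabVIEW epoch (1904-01-01 UTC) plus a uint64 count of 2**-64 second fractions. A minimal conversion of the values used above to a datetime:

from datetime import datetime, timedelta, timezone

epoch = datetime(1904, 1, 1, tzinfo=timezone.utc)
start_time = epoch + timedelta(
    seconds=3524551547 + (1234567890 * 10**10) / 2.0**64)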
Example #26
def test_digital_line_scaler_data():
    """ Test loading a DAQmx file with a single channel of U8 digital line scaler data
    """

    scaler_metadata = daqmx_scaler_metadata(0, 0, 2, digital_line_scaler=True)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1",
                               4, [4], [scaler_metadata],
                               digital_line_scaler=True))
    data = ("00 00 00 00" "00 00 01 00" "00 00 00 00" "00 00 01 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["Channel1"].raw_data

    assert data.dtype == np.uint8
    np.testing.assert_array_equal(data, [0, 1, 0, 1])
Example #27
def test_digital_line_scaler_data_uses_first_bit_of_bytes():
    """ Test DAQmx digital line scaler data only uses the first bit in each byte to represent a 1 or 0 value
    """

    scaler_metadata = daqmx_scaler_metadata(0, 0, 2, digital_line_scaler=True)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1",
                               4, [4], [scaler_metadata],
                               digital_line_scaler=True))
    data = ("00 00 00 00" "00 00 01 00" "00 00 02 00" "00 00 03 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data = tdms_data["Group"]["Channel1"].raw_data

    assert data.dtype == np.uint8
    np.testing.assert_array_equal(data, [0, 1, 0, 1])
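
A sketch of the decoding these assertions rely on: with a raw buffer width of 4 bytes per sample and the scaler at byte offset 2, only the least significant bit of that byte carries the line value:

import numpy as np

raw = np.frombuffer(bytes.fromhex(
    "00 00 00 00" "00 00 01 00" "00 00 02 00" "00 00 03 00"), dtype=np.uint8)
line_values = raw.reshape(-1, 4)[:, 2] & 1
assert list(line_values) == [0, 1, 0, 1]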
Example #28
def test_hdf_properties(tmp_path):
    """ Test properties are converted to attributes in HDF files
    """
    test_file = GeneratedFile()
    test_file.add_segment(*basic_segment())
    tdms_data = test_file.load()

    h5_path = tmp_path / 'h5_properties_test.h5'
    h5 = tdms_data.as_hdf(h5_path)

    # File level properties
    assert h5.attrs['num'] == 15

    # Group properties
    assert h5['Group'].attrs['prop'] == 'value'
    assert h5['Group'].attrs['num'] == 10

    # Channel properties
    assert h5['Group']['Channel2'].attrs['wf_start_offset'] == 0.0
    assert h5['Group']['Channel2'].attrs['wf_increment'] == 0.1
Example #29
def test_export_with_empty_channels():
    """Convert a group to dataframe when a channel has empty data and void data type"""

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2),
            channel_metadata_with_no_data("/'group'/'channel2'"),
        ), "01 00 00 00 02 00 00 00")

    tdms_data = test_file.load()

    df = tdms_data["group"].as_dataframe()
    assert len(df) == 2
    assert len(df.keys()) == 2
    assert "channel1" in df.keys()
    assert "channel2" in df.keys()
    assert (df["channel1"] == [1, 2]).all()
    assert len(df["channel2"]) == 2
    assert np.isnan(df["channel2"]).all()
Example #30
def test_timestamp_property(tmp_path):
    """ Test a timestamp property is converted to an attribute in an HDF file
        HDF doesn't support timestamps natively, so these are converted to strings
    """
    test_file = GeneratedFile()
    properties = {
        "wf_start_time": (0x44, hexlify_value("<Q", 0) + hexlify_value("<q", 3524551547))
    }
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2, properties),
        ),
        "01 00 00 00" "02 00 00 00"
    )
    tdms_data = test_file.load()

    h5_path = tmp_path / 'h5_properties_test.h5'
    h5 = tdms_data.as_hdf(h5_path)

    assert h5['group']['channel1'].attrs['wf_start_time'] == b'2015-09-08T10:05:47.000000Z'