Esempio n. 1
0
def float_data():
    """ Test reading a file with float valued data

        Returns a generated file and the per-channel data expected from it.
    """
    single_arr = np.array([0.123, 0.234, 0.345, 0.456], dtype=np.float32)
    double_arr = np.array([0.987, 0.876, 0.765, 0.654], dtype=np.double)
    # Raw data layout: first two singles, first two doubles,
    # then the remaining singles followed by the remaining doubles.
    value_chunks = [
        ("<f", single_arr[0:2]),
        ("<d", double_arr[0:2]),
        ("<f", single_arr[2:4]),
        ("<d", double_arr[2:4]),
    ]
    data = "".join(
        hexlify_value(fmt, value)
        for fmt, values in value_chunks
        for value in values)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'single_channel'", TDS_TYPE_FLOAT32, 2),
            channel_metadata("/'group'/'double_channel'", TDS_TYPE_FLOAT64, 2),
        ), data)
    expected_data = {
        ('group', 'single_channel'): single_arr,
        ('group', 'double_channel'): double_arr,
    }
    return test_file, expected_data
Esempio n. 2
0
def test_mixed_channel_widths():
    """ Test loading a DAQmx file with channels with different widths

        Three channels share 7-byte raw data rows; the dtype asserts below
        show they are read as int8, int16 and int32 respectively.
    """

    # Scaler arguments appear to be (scale_id, type_code, byte_offset):
    # type codes 1/3/5 map to int8/int16/int32 per the asserts below,
    # and the offsets 0/1/3 match the value positions in each row.
    scaler_1 = daqmx_scaler_metadata(0, 1, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 1)
    scaler_3 = daqmx_scaler_metadata(0, 5, 3)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [7], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [7], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [7], [scaler_3]))
    data = (
        # Data for segment: four 7-byte rows, one value per channel per row
        "01 11 00 21 00 00 00"
        "02 12 00 22 00 00 00"
        "03 13 00 23 00 00 00"
        "04 14 00 24 00 00 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    # 1-byte values at offset 0: 0x01..0x04
    data_1 = tdms_data["Group"]["Channel1"].raw_data
    assert data_1.dtype == np.int8
    np.testing.assert_array_equal(data_1, [1, 2, 3, 4])

    # 2-byte values at offset 1: 0x11..0x14
    data_2 = tdms_data["Group"]["Channel2"].raw_data
    assert data_2.dtype == np.int16
    np.testing.assert_array_equal(data_2, [17, 18, 19, 20])

    # 4-byte values at offset 3: 0x21..0x24
    data_3 = tdms_data["Group"]["Channel3"].raw_data
    assert data_3.dtype == np.int32
    np.testing.assert_array_equal(data_3, [33, 34, 35, 36])
Esempio n. 3
0
def test_read_interleaved_timestamp_data(benchmark):
    """ Benchmark reading a file with interleaved timestamp data
    """
    # Five distinct timestamps, doubled then tiled to give 1000 values
    base_times = [
        np.datetime64('2012-08-23T00:00:00.123', 'us'),
        np.datetime64('2012-08-23T01:02:03.456', 'us'),
        np.datetime64('2012-08-23T12:00:00.0', 'us'),
        np.datetime64('2012-08-23T12:02:03.9999', 'us'),
        np.datetime64('2012-08-23T12:02:03.9999', 'us'),
    ]
    timestamps = np.tile(np.array(base_times * 2), 100)
    chunk_data = timestamp_data_chunk(timestamps)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 0x44, 100, {}),
            channel_metadata("/'group'/'channel2'", 0x44, 100, {}),
        ), chunk_data)

    tdms_file = benchmark(read_from_start, test_file.get_bytes_io_file())

    # Interleaved values alternate between the two channels
    np.testing.assert_equal(tdms_file['group']['channel1'][:],
                            timestamps[0::2])
    np.testing.assert_equal(tdms_file['group']['channel2'][:],
                            timestamps[1::2])
Esempio n. 4
0
def test_multiple_scalers_with_same_type():
    """ Test loading a DAQmx file with one channel containing multiple
        format changing scalers of the same type
    """

    # Two int16 scalers (type code 3, per the dtype asserts below) with
    # scale ids 0 and 1, reading from byte offsets 0 and 2 of each row
    scaler_metadata = [
        daqmx_scaler_metadata(0, 3, 0),
        daqmx_scaler_metadata(1, 3, 2)
    ]
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], scaler_metadata))
    data = (
        # Data for segment: 4-byte rows holding one value per scaler
        "01 00"
        "11 00"
        "02 00"
        "12 00"
        "03 00"
        "13 00"
        "04 00"
        "14 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()
    channel = tdms_data["Group"]["Channel1"]

    # Scaler id 0 reads 0x01..0x04
    scaler_0_data = channel.raw_scaler_data[0]
    assert scaler_0_data.dtype == np.int16
    np.testing.assert_array_equal(scaler_0_data, [1, 2, 3, 4])

    # Scaler id 1 reads 0x11..0x14
    scaler_1_data = channel.raw_scaler_data[1]
    assert scaler_1_data.dtype == np.int16
    np.testing.assert_array_equal(scaler_1_data, [17, 18, 19, 20])
Esempio n. 5
0
def test_single_channel_u32():
    """ Test loading a DAQmx file with a single channel of U32 data
    """

    scaler = daqmx_scaler_metadata(0, 4, 0)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler]))
    # Two small values followed by the two largest unsigned 32-bit values
    raw_rows = [
        "01 00 00 00",
        "02 00 00 00",
        "FF FF FF FF",
        "FE FF FF FF",
    ]
    data = "".join(raw_rows)

    generated_file = GeneratedFile()
    generated_file.add_segment(segment_toc(), metadata, data)
    tdms_data = generated_file.load()

    channel_values = tdms_data["Group"]["Channel1"].raw_data

    assert channel_values.dtype == np.uint32
    np.testing.assert_array_equal(
        channel_values, [1, 2, 2**32 - 1, 2**32 - 2])
Esempio n. 6
0
def timestamp_data():
    """Test reading contiguous timestamp data

    Returns a generated file and the per-channel data expected from it.
    """

    times = [
        np.datetime64('2012-08-23T00:00:00.123', 'us'),
        np.datetime64('2012-08-23T01:02:03.456', 'us'),
        np.datetime64('2012-08-23T12:00:00.0', 'us'),
        np.datetime64('2012-08-23T12:02:03.9999', 'us'),
    ]

    # Contiguous layout: the first two values belong to channel 1
    # and the last two to channel 2.
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList")
    metadata = segment_objects_metadata(
        channel_metadata("/'Group'/'TimeChannel1'", 0x44, 2),
        channel_metadata("/'Group'/'TimeChannel2'", 0x44, 2),
    )
    test_file = GeneratedFile()
    test_file.add_segment(toc, metadata, timestamp_data_chunk(times))

    expected_data = {
        ('Group', 'TimeChannel1'): np.array(times[0:2]),
        ('Group', 'TimeChannel2'): np.array(times[2:4]),
    }

    return test_file, expected_data
Esempio n. 7
0
def segment_without_data():
    """Fixture with segments that alternate between having and lacking data.

    Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    # First segment defines the channel's structure but has no raw data
    test_file.add_segment(
        ("kTocMetaData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2), ),
        "")
    # Later segments reuse the channel's previous raw data structure,
    # alternating between segments with and without data
    followup_segments = [
        (("kTocMetaData", "kTocNewObjList", "kTocRawData"),
         "01 00 00 00"
         "02 00 00 00"),
        (("kTocMetaData", "kTocNewObjList"), ""),
        (("kTocMetaData", "kTocNewObjList", "kTocRawData"),
         "03 00 00 00"
         "04 00 00 00"),
    ]
    for toc, segment_data in followup_segments:
        test_file.add_segment(
            toc,
            segment_objects_metadata(
                channel_metadata_with_repeated_structure(
                    "/'group'/'channel1'"), ),
            segment_data)
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 3, 4], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 8
0
def add_new_channel():
    """ Add a new voltage channel, with the other two channels
        remaining unchanged, so only the new channel is in metadata section

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # No kTocNewObjList in the second segment, so channels 1 and 2 keep
    # their structure and channel3 is appended: data is channel1 (5, 6),
    # channel2 (7, 8), then channel3 (9, 10) per expected_data below.
    test_file.add_segment(("kTocMetaData", "kTocRawData"),
                          segment_objects_metadata(
                              channel_metadata("/'group'/'channel3'",
                                               TDS_TYPE_INT32, 2), ),
                          "05 00 00 00"
                          "06 00 00 00"
                          "07 00 00 00"
                          "08 00 00 00"
                          "09 00 00 00"
                          "0A 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8], dtype=np.int32),
        ('group', 'channel3'): np.array([9, 10], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 9
0
def complex_data():
    """ Test reading a file with complex valued data

        Returns a generated file and the per-channel data expected from it.
    """
    complex_single_arr = np.array([1 + 2j, 3 + 4j], dtype=np.complex64)
    complex_double_arr = np.array([5 + 6j, 7 + 8j], dtype=np.complex128)
    # Each complex value is serialized as its real part followed by its
    # imaginary part; all singles come before all doubles.
    parts = []
    for fmt, arr in [("<f", complex_single_arr), ("<d", complex_double_arr)]:
        for num in arr:
            parts.append(hexlify_value(fmt, num.real))
            parts.append(hexlify_value(fmt, num.imag))
    data = "".join(parts)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'complex_single_channel'",
                             TDS_TYPE_COMPLEX64, 2),
            channel_metadata("/'group'/'complex_double_channel'",
                             TDS_TYPE_COMPLEX128, 2),
        ), data)
    expected_data = {
        ('group', 'complex_single_channel'): complex_single_arr,
        ('group', 'complex_double_channel'): complex_double_arr,
    }
    return test_file, expected_data
Esempio n. 10
0
def incomplete_last_row_of_interleaved_data():
    """ Test incomplete last row of interleaved data

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    # 11 interleaved values for two channels: the final value (0x0B) is
    # an incomplete row and is not included in expected_data below.
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00"
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00"
        "09 00 00 00"
        "0A 00 00 00"
        "0B 00 00 00",
        incomplete=True)
    expected_data = {
        ('group', 'channel1'): np.array([1, 3, 5, 7, 9], dtype=np.int32),
        ('group', 'channel2'): np.array([2, 4, 6, 8, 10], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 11
0
def object_with_no_metadata_in_segment():
    """ Re-use an object without setting any new metadata and
        re-using the data structure

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # Second segment lists both channels but with repeated structure
    # metadata, so the previous type and length are reused
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata_with_repeated_structure("/'group'/'channel1'"),
            channel_metadata_with_repeated_structure("/'group'/'channel2'"),
        ), "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 12
0
def less_data_than_expected_interleaved():
    """ Add interleaved data segment and then a repeated segment without
        any lead in or metadata, so data is read in chunks,
        but the extra chunk does not have as much data as expected.

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    # Metadata declares 3 values per channel (6 values per chunk) but only
    # 10 values are present, so the second chunk is short by one row;
    # expected_data below reflects the 5 values read for each channel.
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 3),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 3),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00"
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00"
        "09 00 00 00"
        "0A 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 3, 5, 7, 9], dtype=np.int32),
        ('group', 'channel2'): np.array([2, 4, 6, 8, 10], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 13
0
def incomplete_last_segment():
    """ Test incomplete last segment, eg. if LabView crashed

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00"
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00")
    # Final segment has no metadata (reuses the previous structure) and is
    # marked incomplete; it holds only one full chunk of 4 values.
    test_file.add_segment(("kTocRawData", ),
                          "", "09 00 00 00"
                          "0A 00 00 00"
                          "0B 00 00 00"
                          "0C 00 00 00",
                          incomplete=True)
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6, 9, 10], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8, 11, 12], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 14
0
def chunked_segment():
    """ Add segment and then a repeated segment without
        any lead in or metadata, so data is read in chunks

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    # Each chunk holds 2 values per channel, so this segment of 8 values
    # contains two chunks alternating channel1 / channel2 pairs.
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "00 00 00 00"
        "01 00 00 00"
        "0A 00 00 00"
        "0B 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "0C 00 00 00"
        "0D 00 00 00")
    # Second segment has empty metadata so the previous object list and
    # chunk layout are reused for another two chunks.
    test_file.add_segment(("kTocRawData", ), "", "04 00 00 00"
                          "05 00 00 00"
                          "0E 00 00 00"
                          "0F 00 00 00"
                          "06 00 00 00"
                          "07 00 00 00"
                          "10 00 00 00"
                          "11 00 00 00")
    expected_data = {
        ('group', 'channel1'):
        np.array([0, 1, 2, 3, 4, 5, 6, 7], dtype=np.int32),
        ('group', 'channel2'):
        np.array([10, 11, 12, 13, 14, 15, 16, 17], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 15
0
def chunked_interleaved_segment():
    """ Add interleaved segment and then a repeated segment without
        any lead in or metadata, so data is read in chunks

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    # Interleaved data: values alternate channel1, channel2, ...
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00"
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00")
    # Second segment has empty metadata, reusing the previous object list;
    # it repeats the same values in reverse pair order.
    test_file.add_segment(("kTocRawData", "kTocInterleavedData"), "",
                          "07 00 00 00"
                          "08 00 00 00"
                          "05 00 00 00"
                          "06 00 00 00"
                          "03 00 00 00"
                          "04 00 00 00"
                          "01 00 00 00"
                          "02 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 3, 5, 7, 7, 5, 3, 1],
                                        dtype=np.int32),
        ('group', 'channel2'): np.array([2, 4, 6, 8, 8, 6, 4, 2],
                                        dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 16
0
def remove_a_channel():
    """ In the second segment, remove a channel.
        We need to write a new object list in this case

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # kTocNewObjList replaces the object list, so only channel1 receives
    # data in the second segment; channel2 keeps its original two values.
    test_file.add_segment(("kTocMetaData", "kTocRawData", "kTocNewObjList"),
                          segment_objects_metadata(
                              channel_metadata("/'group'/'channel1'",
                                               TDS_TYPE_INT32, 2), ),
                          "05 00 00 00"
                          "06 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 17
0
def increase_channel_size():
    """ In the second segment, increase the channel size of one channel

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # No kTocNewObjList: channel1 keeps its previous length of 2 while
    # channel2 grows to 4 values, so the 6 values split as
    # channel1 (5, 6) then channel2 (7, 8, 9, 10).
    test_file.add_segment(("kTocMetaData", "kTocRawData"),
                          segment_objects_metadata(
                              channel_metadata("/'group'/'channel2'",
                                               TDS_TYPE_INT32, 4), ),
                          "05 00 00 00"
                          "06 00 00 00"
                          "07 00 00 00"
                          "08 00 00 00"
                          "09 00 00 00"
                          "0A 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8, 9, 10], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 18
0
def repeated_objects_without_data_in_segment_and_added_object():
    """ Repeated objects with no data in new segment as well as a new channel with data

        Returns a generated file and the per-channel data expected from it.
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # Channels 1 and 2 are listed with no data in the second segment, so
    # the two values there belong solely to the new channel3.
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata_with_no_data("/'group'/'channel1'"),
            channel_metadata_with_no_data("/'group'/'channel2'"),
            channel_metadata("/'group'/'channel3'", TDS_TYPE_INT32, 2),
        ), "05 00 00 00"
        "06 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4], dtype=np.int32),
        ('group', 'channel3'): np.array([5, 6], dtype=np.int32),
    }
    return test_file, expected_data
Esempio n. 19
0
def test_reading_subset_of_data(offset, length):
    """ Test reading a subset of channel data given an offset and length """
    channel_data = np.arange(0, 100, 1, dtype=np.int32)
    # Split data into different sized segments at these boundaries
    boundaries = [0, 10, 20, 60, 80, 90, 100]
    hex_segment_data = [
        "".join(hexlify_value('<i', x) for x in channel_data[begin:end])
        for begin, end in zip(boundaries[:-1], boundaries[1:])
    ]
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 5), ),
        hex_segment_data[0])
    # Remaining segments reuse the metadata of the first
    for hex_data in hex_segment_data[1:]:
        test_file.add_segment(("kTocRawData", ), "", hex_data)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            channel_subset = tdms_file['group']['channel1'].read_data(
                offset, length)
            expected_data = channel_data[offset:offset + length]
            assert len(channel_subset) == len(expected_data)
            np.testing.assert_equal(channel_subset, expected_data)
Esempio n. 20
0
def test_stream_scaled_data_chunks(benchmark):
    """ Benchmark streaming channel data when the data is scaled
    """
    # Linear scale with slope 2 and intercept 10, matching the
    # 10.0 + 2.0 * x expectation at the end of the test
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
        "NI_Scale[0]_Scale_Type":
        (0x20, hexlify_value("<I", len("Linear")) + string_hexlify("Linear")),
        "NI_Scale[0]_Linear_Slope": (10, hexlify_value("<d", 2.0)),
        "NI_Scale[0]_Linear_Y_Intercept": (10, hexlify_value("<d", 10.0))
    }
    test_file = GeneratedFile()
    data_array = np.arange(0, 1000, dtype=np.dtype('int32'))
    data = data_array.tobytes()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 100,
                             properties), ),
        data,
        binary_data=True)
    # Nine more segments reusing the same metadata, for 10 segments total
    for _ in range(0, 9):
        test_file.add_segment(("kTocRawData", ), "", data, binary_data=True)

    with TdmsFile.open(test_file.get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel1']
        channel_data = benchmark(stream_chunks, channel)

        # Streamed chunks concatenate to the full scaled channel data
        channel_data = np.concatenate(channel_data)
        expected_data = np.tile(10.0 + 2.0 * data_array, 10)
        np.testing.assert_equal(channel_data, expected_data)
Esempio n. 21
0
def float_data_with_unit():
    """ Test reading a file with float valued data with units

        These are the same as normal floating point data but have a 'unit_string' property
    """
    single_arr = np.array([0.123, 0.234, 0.345, 0.456], dtype=np.float32)
    double_arr = np.array([0.987, 0.876, 0.765, 0.654], dtype=np.double)
    # Raw data layout: first two singles, first two doubles,
    # then the remaining singles followed by the remaining doubles.
    value_chunks = [
        ("<f", single_arr[0:2]),
        ("<d", double_arr[0:2]),
        ("<f", single_arr[2:4]),
        ("<d", double_arr[2:4]),
    ]
    data = "".join(
        hexlify_value(fmt, value)
        for fmt, values in value_chunks
        for value in values)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'single_channel'",
                             TDS_TYPE_FLOAT32_WITH_UNIT, 2),
            channel_metadata("/'group'/'double_channel'",
                             TDS_TYPE_FLOAT64_WITH_UNIT, 2),
        ), data)
    expected_data = {
        ('group', 'single_channel'): single_arr,
        ('group', 'double_channel'): double_arr,
    }
    return test_file, expected_data
Esempio n. 22
0
def test_daqmx_debug_logging(caplog):
    """ Test loading a DAQmx file with debug logging enabled
    """
    scaler_metadata = daqmx_scaler_metadata(0, 3, 0)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [2], [scaler_metadata]))
    data = (
        "01 00"
        "02 00"
        "FF FF"
        "FE FF"
    )

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)

    # Enable debug logging before loading so metadata parsing is logged
    log_manager.set_level(logging.DEBUG)
    _ = test_file.load()

    # Check the expected details were captured in the debug log output
    assert "Reading metadata for object /'Group'/'Channel1' with index header 0x00001269" in caplog.text
    assert "scaler_type=4713" in caplog.text
    assert "scale_id=0" in caplog.text
    assert "data_type=Int16" in caplog.text
Esempio n. 23
0
def test_two_channel_i16():
    """ Test loading a DAQmx file with two channels of I16 data
    """

    # One scaler per channel, reading from different offsets in each row
    channel_1_scaler = daqmx_scaler_metadata(0, 3, 0)
    channel_2_scaler = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [channel_1_scaler]),
        daqmx_channel_metadata("Channel2", 4, [4], [channel_2_scaler]))
    # Data for segment: one value per channel in each 4-byte row
    raw_rows = [
        "01 00" "11 00",
        "02 00" "12 00",
        "03 00" "13 00",
        "04 00" "14 00",
    ]
    data = "".join(raw_rows)

    generated_file = GeneratedFile()
    generated_file.add_segment(segment_toc(), metadata, data)
    tdms_data = generated_file.load()

    for channel_name, expected_values in [
            ("Channel1", [1, 2, 3, 4]),
            ("Channel2", [17, 18, 19, 20])]:
        channel_values = tdms_data["Group"][channel_name].raw_data
        assert channel_values.dtype == np.int16
        np.testing.assert_array_equal(channel_values, expected_values)
Esempio n. 24
0
def test_slash_and_space_in_name():
    """Test name like '01/02/03 something'"""

    group_1 = "01/02/03 something"
    channel_1 = "04/05/06 another thing"
    group_2 = "01/02/03 a"
    channel_2 = "04/05/06 b"

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'{0}'/'{1}'".format(group_1, channel_1), 3, 2),
            channel_metadata("/'{0}'/'{1}'".format(group_2, channel_2), 3, 2),
        ), "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")

    tdms_data = test_file.load()

    # Both groups should be found with one channel of two values each
    assert len(tdms_data.groups()) == 2
    for group, channel in [(group_1, channel_1), (group_2, channel_2)]:
        assert len(tdms_data[group].channels()) == 1
        channel_values = tdms_data[group][channel].data
        assert len(channel_values) == 2
Esempio n. 25
0
def test_get_len_of_group():
    """Test getting the length of a TdmsGroup
    """
    generated_file = GeneratedFile()
    generated_file.add_segment(*basic_segment())
    tdms_data = generated_file.load()

    group = tdms_data['Group']
    assert len(group) == 2
Esempio n. 26
0
def test_group_object_paths():
    """Test the path and name properties for a group"""
    generated_file = GeneratedFile()
    generated_file.add_segment(*basic_segment())
    tdms_data = generated_file.load()

    group = tdms_data["Group"]
    assert (group.path, group.name) == ("/'Group'", "Group")
Esempio n. 27
0
def test_root_object_paths():
    """Test the group and channel properties for the root object"""
    generated_file = GeneratedFile()
    generated_file.add_segment(*basic_segment())
    tdms_data = generated_file.load()

    # The root object belongs to no group or channel
    root_object = tdms_data.object()
    assert root_object.group is None
    assert root_object.channel is None
Esempio n. 28
0
def test_group_property_read():
    """Test reading property of a group"""

    generated_file = GeneratedFile()
    generated_file.add_segment(*basic_segment())
    tdms_data = generated_file.load()

    assert tdms_data["Group"].properties["num"] == 10
Esempio n. 29
0
def test_multiple_raw_data_buffers_with_different_widths():
    """ DAQmx with raw data buffers with different widths

        Channels 1-3 read int16 values from the first buffer (6-byte rows)
        and channels 4-5 read int32 values from the second (8-byte rows).
    """

    # The fourth scaler argument appears to select the raw buffer:
    # scalers 1-3 use buffer 0 at offsets 0/2/4, scalers 4-5 use
    # buffer 1 at offsets 0/4 — TODO confirm against the helper's signature
    scaler_1 = daqmx_scaler_metadata(0, 3, 0, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2, 0)
    scaler_3 = daqmx_scaler_metadata(0, 3, 4, 0)
    scaler_4 = daqmx_scaler_metadata(0, 5, 0, 1)
    scaler_5 = daqmx_scaler_metadata(0, 5, 4, 1)
    metadata = segment_objects_metadata(
        root_metadata(), group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [6, 8], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [6, 8], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [6, 8], [scaler_3]),
        daqmx_channel_metadata("Channel4", 4, [6, 8], [scaler_4]),
        daqmx_channel_metadata("Channel5", 4, [6, 8], [scaler_5]))
    # First buffer: 12 int16 values (four 6-byte rows), then second
    # buffer: 8 int32 values (four 8-byte rows)
    data = ("01 00"
            "02 00"
            "03 00"
            "04 00"
            "05 00"
            "06 00"
            "07 00"
            "08 00"
            "09 00"
            "0A 00"
            "0B 00"
            "0C 00"
            "0D 00 00 00"
            "0E 00 00 00"
            "0F 00 00 00"
            "10 00 00 00"
            "11 00 00 00"
            "12 00 00 00"
            "13 00 00 00"
            "14 00 00 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    data_1 = tdms_data["Group"]["Channel1"].raw_data
    data_2 = tdms_data["Group"]["Channel2"].raw_data
    data_3 = tdms_data["Group"]["Channel3"].raw_data
    data_4 = tdms_data["Group"]["Channel4"].raw_data
    data_5 = tdms_data["Group"]["Channel5"].raw_data

    for data in [data_1, data_2, data_3]:
        assert data.dtype == np.int16
    for data in [data_4, data_5]:
        assert data.dtype == np.int32

    np.testing.assert_array_equal(data_1, [1, 4, 7, 10])
    np.testing.assert_array_equal(data_2, [2, 5, 8, 11])
    np.testing.assert_array_equal(data_3, [3, 6, 9, 12])
    np.testing.assert_array_equal(data_4, [13, 15, 17, 19])
    np.testing.assert_array_equal(data_5, [14, 16, 18, 20])
Esempio n. 30
0
def interleaved_timestamp_and_numpy_data():
    """Test reading timestamp data interleaved with a standard numpy data type

    The segment metadata is built by hand as hex strings here because the
    two channels have different raw data types (timestamp and int32).
    Returns a generated file and the per-channel data expected from it.
    """

    times = [
        np.datetime64('2012-08-23T00:00:00.123', 'us'),
        np.datetime64('2012-08-23T01:02:03.456', 'us'),
    ]

    metadata = (
        # Number of objects
        "02 00 00 00"
        # Length of the object path (0x16 = 22 characters)
        "16 00 00 00")
    metadata += string_hexlify("/'Group'/'TimeChannel'")
    metadata += (
        # Length of index information
        "14 00 00 00"
        # Raw data data type (0x44 = timestamp)
        "44 00 00 00"
        # Dimension
        "01 00 00 00"
        # Number of raw data values
        "02 00 00 00"
        "00 00 00 00"
        # Number of properties (0)
        "00 00 00 00")
    # Length of the second object path (0x15 = 21 characters)
    metadata += ("15 00 00 00")
    metadata += string_hexlify("/'Group'/'IntChannel'")
    metadata += (
        # Length of index information
        "14 00 00 00"
        # Raw data data type (0x03 = int32)
        "03 00 00 00"
        # Dimension
        "01 00 00 00"
        # Number of raw data values
        "02 00 00 00"
        "00 00 00 00"
        # Number of properties (0)
        "00 00 00 00")

    # Interleaved rows: each row is one timestamp followed by one int32
    data = (timestamp_data_chunk([times[0]]) + "01 00 00 00" +
            timestamp_data_chunk([times[1]]) + "02 00 00 00")

    test_file = GeneratedFile()
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList",
           "kTocInterleavedData")
    test_file.add_segment(toc, metadata, data)

    expected_data = {
        ('Group', 'TimeChannel'): np.array([times[0], times[1]]),
        ('Group', 'IntChannel'): np.array([1, 2], dtype=np.dtype('int32')),
    }

    return test_file, expected_data