コード例 #1
0
def object_with_no_metadata_in_segment():
    """ Re-use an object without setting any new metadata and
        re-using the data structure
    """
    toc = ("kTocMetaData", "kTocRawData", "kTocNewObjList")
    initial_metadata = segment_objects_metadata(
        channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
        channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
    )
    # Second segment references the objects again with no new structure
    repeated_metadata = segment_objects_metadata(
        channel_metadata_with_repeated_structure("/'group'/'channel1'"),
        channel_metadata_with_repeated_structure("/'group'/'channel2'"),
    )
    test_file = GeneratedFile()
    test_file.add_segment(
        toc, initial_metadata,
        "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    test_file.add_segment(
        toc, repeated_metadata,
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #2
0
def add_new_channel():
    """ Add a new voltage channel, with the other two channels
        remaining unchanged, so only the new channel is in metadata section
    """
    test_file = GeneratedFile()
    initial_metadata = segment_objects_metadata(
        channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
        channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        initial_metadata,
        "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # No kTocNewObjList flag here, so channel3 is appended to the
    # existing object list rather than replacing it
    added_channel_metadata = segment_objects_metadata(
        channel_metadata("/'group'/'channel3'", TDS_TYPE_INT32, 2),
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData"),
        added_channel_metadata,
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00"
        "09 00 00 00"
        "0A 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8], dtype=np.int32),
        ('group', 'channel3'): np.array([9, 10], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #3
0
def remove_a_channel():
    """ In the second segment, remove a channel.
        We need to write a new object list in this case
    """
    test_file = GeneratedFile()
    both_channels = segment_objects_metadata(
        channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
        channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        both_channels,
        "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # kTocNewObjList replaces the object list, dropping channel2
    only_channel1 = segment_objects_metadata(
        channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        only_channel1,
        "05 00 00 00"
        "06 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #4
0
def segment_without_data():
    """ Alternate segments with and without raw data for the same channel
    """
    def repeated_metadata():
        # Metadata that re-uses the structure defined in the first segment
        return segment_objects_metadata(
            channel_metadata_with_repeated_structure("/'group'/'channel1'"), )

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2), ),
        "")
    test_file.add_segment(
        ("kTocMetaData", "kTocNewObjList", "kTocRawData"),
        repeated_metadata(),
        "01 00 00 00"
        "02 00 00 00")
    test_file.add_segment(
        ("kTocMetaData", "kTocNewObjList"),
        repeated_metadata(),
        "")
    test_file.add_segment(
        ("kTocMetaData", "kTocNewObjList", "kTocRawData"),
        repeated_metadata(),
        "03 00 00 00"
        "04 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 3, 4], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #5
0
def increase_channel_size():
    """ In the second segment, increase the channel size of one channel
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ),
        "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # No kTocNewObjList: channel1 keeps its structure and channel2 is
    # updated to hold 4 values per chunk
    resized_metadata = segment_objects_metadata(
        channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 4), )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData"),
        resized_metadata,
        "05 00 00 00"
        "06 00 00 00"
        "07 00 00 00"
        "08 00 00 00"
        "09 00 00 00"
        "0A 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8, 9, 10], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #6
0
def repeated_objects_without_data_in_segment_and_added_object():
    """ Repeated objects with no data in new segment as well as a new channel with data
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ),
        "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")
    # channels 1 and 2 are listed again but contribute no raw data;
    # only the new channel3 has values in this segment
    second_segment_metadata = segment_objects_metadata(
        channel_metadata_with_no_data("/'group'/'channel1'"),
        channel_metadata_with_no_data("/'group'/'channel2'"),
        channel_metadata("/'group'/'channel3'", TDS_TYPE_INT32, 2),
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        second_segment_metadata,
        "05 00 00 00"
        "06 00 00 00")
    expected_data = {
        ('group', 'channel1'): np.array([1, 2], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4], dtype=np.int32),
        ('group', 'channel3'): np.array([5, 6], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #7
0
def test_read_with_mismatching_index_file():
    """ Test that reading data when the index file doesn't match the data file raises an error
    """

    # Data file: two segments, each with 2 int32 values per channel
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2),
            channel_metadata("/'group'/'channel2'", 3, 2),
        ),
        "01 00 00 00" "02 00 00 00"
        "03 00 00 00" "04 00 00 00"
    )
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 2),
            channel_metadata("/'group'/'channel2'", 3, 2),
        ),
        "01 00 00 00" "02 00 00 00"
        "03 00 00 00" "04 00 00 00"
    )

    # Second file declares 3 values per channel, so an index generated
    # from it cannot describe the 2-value segments of the data file above
    test_file_with_index = GeneratedFile()
    test_file_with_index.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 3),
            channel_metadata("/'group'/'channel2'", 3, 3),
        ),
        "01 00 00 00" "02 00 00 00" "03 00 00 00"
        "04 00 00 00" "05 00 00 00" "06 00 00 00"
    )
    test_file_with_index.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 3),
            channel_metadata("/'group'/'channel2'", 3, 3),
        ),
        "01 00 00 00" "02 00 00 00" "03 00 00 00"
        "04 00 00 00" "05 00 00 00" "06 00 00 00"
    )

    with test_file.get_tempfile(delete=False) as tdms_file:
        with test_file_with_index.get_tempfile_with_index() as tdms_file_with_index_path:
            # Move index file from second file to match the name of the first file
            new_index_file = tdms_file.name + '_index'
            copyfile(tdms_file_with_index_path + '_index', new_index_file)
            try:
                # Close the handle so TdmsFile.read can reopen it by name
                tdms_file.file.close()
                with pytest.raises(ValueError) as exc_info:
                    _ = TdmsFile.read(tdms_file.name)
                assert 'Check that the tdms_index file matches the tdms data file' in str(exc_info.value)
            finally:
                # Clean up manually since the tempfile was created with delete=False
                os.remove(new_index_file)
                os.remove(tdms_file.name)
コード例 #8
0
ファイル: test_daqmx.py プロジェクト: xiangnandang/npTDMS
def test_multiple_scalers_with_same_type():
    """ Test loading a DAQmx file with one channel containing multiple
        format changing scalers of the same type
    """
    scalers = [
        daqmx_scaler_metadata(0, 3, 0),
        daqmx_scaler_metadata(1, 3, 2),
    ]
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], scalers))
    # Interleaved rows: one value per scaler in each row
    raw_data = (
        "01 00" "11 00"
        "02 00" "12 00"
        "03 00" "13 00"
        "04 00" "14 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, raw_data)
    channel = test_file.load()["Group"]["Channel1"]

    for scale_id, expected_values in ((0, [1, 2, 3, 4]),
                                      (1, [17, 18, 19, 20])):
        scaler_data = channel.raw_scaler_data[scale_id]
        assert scaler_data.dtype == np.int16
        np.testing.assert_array_equal(scaler_data, expected_values)
コード例 #9
0
ファイル: test_daqmx.py プロジェクト: walker202018/npTDMS
def test_single_channel_u32():
    """ Test loading a DAQmx file with a single channel of U32 data
    """
    scaler = daqmx_scaler_metadata(0, 4, 0)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler]))
    # Two small values followed by the two largest u32 values
    raw_data = (
        "01 00 00 00"
        "02 00 00 00"
        "FF FF FF FF"
        "FE FF FF FF")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, raw_data)
    tdms_data = test_file.load()

    channel_data = tdms_data["Group"]["Channel1"].raw_data
    assert channel_data.dtype == np.uint32
    np.testing.assert_array_equal(
        channel_data, [1, 2, 2**32 - 1, 2**32 - 2])
コード例 #10
0
ファイル: test_daqmx.py プロジェクト: walker202018/npTDMS
def test_daqmx_debug_logging(caplog):
    """ Test loading a DAQmx file with debug logging enabled
    """
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata(
            "Channel1", 4, [2], [daqmx_scaler_metadata(0, 3, 0)]))
    raw_data = (
        "01 00"
        "02 00"
        "FF FF"
        "FE FF")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, raw_data)

    log_manager.set_level(logging.DEBUG)
    _ = test_file.load()

    expected_messages = (
        "Reading metadata for object /'Group'/'Channel1' with index header 0x00001269",
        "scaler_type=4713",
        "scale_id=0",
        "data_type=Int16",
    )
    for message in expected_messages:
        assert message in caplog.text
コード例 #11
0
ファイル: test_daqmx.py プロジェクト: walker202018/npTDMS
def test_two_channel_i16():
    """ Test loading a DAQmx file with two channels of I16 data
    """
    scaler_1 = daqmx_scaler_metadata(0, 3, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 2)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [4], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [4], [scaler_2]))
    # Interleaved rows: one value per channel in each row
    raw_data = (
        "01 00" "11 00"
        "02 00" "12 00"
        "03 00" "13 00"
        "04 00" "14 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, raw_data)
    tdms_data = test_file.load()

    for name, expected_values in (("Channel1", [1, 2, 3, 4]),
                                  ("Channel2", [17, 18, 19, 20])):
        channel_data = tdms_data["Group"][name].raw_data
        assert channel_data.dtype == np.int16
        np.testing.assert_array_equal(channel_data, expected_values)
コード例 #12
0
def test_slash_and_space_in_name():
    """Test name like '01/02/03 something'"""
    group_1 = "01/02/03 something"
    channel_1 = "04/05/06 another thing"
    group_2 = "01/02/03 a"
    channel_2 = "04/05/06 b"

    object_path_1 = "/'{0}'/'{1}'".format(group_1, channel_1)
    object_path_2 = "/'{0}'/'{1}'".format(group_2, channel_2)
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata(object_path_1, 3, 2),
            channel_metadata(object_path_2, 3, 2),
        ),
        "01 00 00 00"
        "02 00 00 00"
        "03 00 00 00"
        "04 00 00 00")

    tdms_data = test_file.load()

    assert len(tdms_data.groups()) == 2
    # Each group contains a single channel with two values
    for group, channel in ((group_1, channel_1), (group_2, channel_2)):
        assert len(tdms_data[group].channels()) == 1
        assert len(tdms_data[group][channel].data) == 2
コード例 #13
0
def test_reading_subset_of_data(offset, length):
    """ Read a subset of channel data that may span segment boundaries """
    channel_data = np.arange(0, 100, 1, dtype=np.int32)
    # Split data into different sized segments
    boundaries = [0, 10, 20, 60, 80, 90, 100]
    hex_segment_data = [
        "".join(hexlify_value('<i', x) for x in channel_data[start:end])
        for start, end in zip(boundaries[:-1], boundaries[1:])
    ]
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 3, 5), ),
        hex_segment_data[0])
    # Remaining segments carry raw data only, re-using the structure
    for hex_data in hex_segment_data[1:]:
        test_file.add_segment(("kTocRawData", ), "", hex_data)

    with test_file.get_tempfile() as temp_file:
        with TdmsFile.open(temp_file.file) as tdms_file:
            channel_subset = tdms_file['group']['channel1'].read_data(
                offset, length)
            expected_data = channel_data[offset:offset + length]
            assert len(channel_subset) == len(expected_data)
            np.testing.assert_equal(channel_subset, expected_data)
コード例 #14
0
def timestamp_data():
    """Test reading contiguous timestamp data
    """
    times = [
        np.datetime64(value, 'us') for value in (
            '2012-08-23T00:00:00.123',
            '2012-08-23T01:02:03.456',
            '2012-08-23T12:00:00.0',
            '2012-08-23T12:02:03.9999',
        )
    ]

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'Group'/'TimeChannel1'", 0x44, 2),
            channel_metadata("/'Group'/'TimeChannel2'", 0x44, 2),
        ),
        timestamp_data_chunk(times))

    # First two timestamps belong to channel 1, last two to channel 2
    expected_data = {
        ('Group', 'TimeChannel1'): np.array(times[0:2]),
        ('Group', 'TimeChannel2'): np.array(times[2:4]),
    }

    return test_file, expected_data
コード例 #15
0
def test_read_interleaved_timestamp_data(benchmark):
    """ Benchmark reading a file with interleaved timestamp data
    """
    base_times = [
        np.datetime64('2012-08-23T00:00:00.123', 'us'),
        np.datetime64('2012-08-23T01:02:03.456', 'us'),
        np.datetime64('2012-08-23T12:00:00.0', 'us'),
        np.datetime64('2012-08-23T12:02:03.9999', 'us'),
        np.datetime64('2012-08-23T12:02:03.9999', 'us'),
    ]
    # 5 base values repeated twice, tiled 100 times -> 1000 timestamps
    timestamps = np.tile(np.array(base_times * 2), 100)
    data = timestamp_data_chunk(timestamps)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 0x44, 100, {}),
            channel_metadata("/'group'/'channel2'", 0x44, 100, {}),
        ), data)

    tdms_file = benchmark(read_from_start, test_file.get_bytes_io_file())

    # Interleaved data alternates between the two channels
    np.testing.assert_equal(
        tdms_file['group']['channel1'][:], timestamps[0::2])
    np.testing.assert_equal(
        tdms_file['group']['channel2'][:], timestamps[1::2])
コード例 #16
0
def test_stream_scaled_data_chunks(benchmark):
    """ Benchmark streaming channel data when the data is scaled
    """
    slope = 2.0
    intercept = 10.0
    properties = {
        "NI_Number_Of_Scales": (3, "01 00 00 00"),
        "NI_Scale[0]_Scale_Type":
        (0x20, hexlify_value("<I", len("Linear")) + string_hexlify("Linear")),
        "NI_Scale[0]_Linear_Slope": (10, hexlify_value("<d", slope)),
        "NI_Scale[0]_Linear_Y_Intercept": (10, hexlify_value("<d", intercept))
    }
    data_array = np.arange(0, 1000, dtype=np.dtype('int32'))
    data = data_array.tobytes()
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 100,
                             properties), ),
        data,
        binary_data=True)
    # Nine further raw-data-only segments re-using the same structure
    for _ in range(0, 9):
        test_file.add_segment(("kTocRawData", ), "", data, binary_data=True)

    with TdmsFile.open(test_file.get_bytes_io_file()) as tdms_file:
        channel = tdms_file['group']['channel1']
        streamed_chunks = benchmark(stream_chunks, channel)

        combined = np.concatenate(streamed_chunks)
        expected_data = np.tile(intercept + slope * data_array, 10)
        np.testing.assert_equal(combined, expected_data)
コード例 #17
0
def test_read_raw_timestamp_data():
    """ Test reading timestamp data as a raw TDMS timestamps
    """
    seconds = 3672033330
    second_fractions = 1234567890 * 10**10
    # Each raw timestamp is second fractions (u64) then seconds (i64)
    raw_timestamps = (
        (0, seconds),
        (second_fractions, seconds),
        (0, seconds + 1),
        (second_fractions, seconds + 1),
    )
    hex_data = "".join(
        hexlify_value("<Q", fractions) + hexlify_value("<q", secs)
        for (fractions, secs) in raw_timestamps)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", 0x44, 4), ),
        hex_data)

    expected_seconds = np.array(
        [secs for (_, secs) in raw_timestamps], np.dtype('int64'))
    expected_second_fractions = np.array(
        [fractions for (fractions, _) in raw_timestamps], np.dtype('uint64'))

    with test_file.get_tempfile() as temp_file:
        tdms_data = TdmsFile.read(temp_file.file, raw_timestamps=True)
        data = tdms_data['group']['channel1'][:]
        assert isinstance(data, TimestampArray)
        np.testing.assert_equal(data.seconds, expected_seconds)
        np.testing.assert_equal(data.second_fractions,
                                expected_second_fractions)
コード例 #18
0
ファイル: test_hdf.py プロジェクト: xiangnandang/npTDMS
def test_floating_point_data_types(tmp_path):
    """ Test conversion of f32 and f64 types to HDF
    """
    values = [1, 2, 3, 4]
    # All f32 values first, then all f64 values
    hex_data = (
        "".join(hexlify_value('<f', v) for v in values) +
        "".join(hexlify_value('<d', v) for v in values))
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'f32'", 9, 4),
            channel_metadata("/'group'/'f64'", 10, 4),
        ),
        hex_data)

    tdms_data = test_file.load()
    h5_path = tmp_path / 'h5_data_test.h5'
    h5 = tdms_data.as_hdf(h5_path)

    for chan, expected_dtype in [
            ('f32', np.dtype('float32')),
            ('f64', np.dtype('float64'))]:
        h5_channel = h5['group'][chan]
        assert h5_channel.dtype == expected_dtype
        np.testing.assert_almost_equal(h5_channel[...], [1.0, 2.0, 3.0, 4.0])
    h5.close()
コード例 #19
0
ファイル: test_daqmx.py プロジェクト: xiangnandang/npTDMS
def test_mixed_channel_widths():
    """ Test loading a DAQmx file with channels with different widths
    """
    # i8 scaler at byte offset 0, i16 at offset 1, i32 at offset 3
    scaler_1 = daqmx_scaler_metadata(0, 1, 0)
    scaler_2 = daqmx_scaler_metadata(0, 3, 1)
    scaler_3 = daqmx_scaler_metadata(0, 5, 3)
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        daqmx_channel_metadata("Channel1", 4, [7], [scaler_1]),
        daqmx_channel_metadata("Channel2", 4, [7], [scaler_2]),
        daqmx_channel_metadata("Channel3", 4, [7], [scaler_3]))
    # Four 7-byte interleaved rows
    raw_data = (
        "01 11 00 21 00 00 00"
        "02 12 00 22 00 00 00"
        "03 13 00 23 00 00 00"
        "04 14 00 24 00 00 00")

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, raw_data)
    tdms_data = test_file.load()

    expectations = (
        ("Channel1", np.int8, [1, 2, 3, 4]),
        ("Channel2", np.int16, [17, 18, 19, 20]),
        ("Channel3", np.int32, [33, 34, 35, 36]),
    )
    for name, expected_dtype, expected_values in expectations:
        channel_data = tdms_data["Group"][name].raw_data
        assert channel_data.dtype == expected_dtype
        np.testing.assert_array_equal(channel_data, expected_values)
コード例 #20
0
def float_data_with_unit():
    """ Test reading a file with float valued data with units

        These are the same as normal floating point data but have a 'unit_string' property
    """
    single_arr = np.array([0.123, 0.234, 0.345, 0.456], dtype=np.float32)
    double_arr = np.array([0.987, 0.876, 0.765, 0.654], dtype=np.double)
    # Two values of each channel per chunk, singles before doubles
    data = "".join(
        [hexlify_value("<f", v) for v in single_arr[0:2]] +
        [hexlify_value("<d", v) for v in double_arr[0:2]] +
        [hexlify_value("<f", v) for v in single_arr[2:4]] +
        [hexlify_value("<d", v) for v in double_arr[2:4]])

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'single_channel'",
                             TDS_TYPE_FLOAT32_WITH_UNIT, 2),
            channel_metadata("/'group'/'double_channel'",
                             TDS_TYPE_FLOAT64_WITH_UNIT, 2),
        ), data)
    expected_data = {
        ('group', 'single_channel'): single_arr,
        ('group', 'double_channel'): double_arr,
    }
    return test_file, expected_data
コード例 #21
0
def complex_data():
    """ Test reading a file with complex valued data
    """
    complex_single_arr = np.array([1 + 2j, 3 + 4j], dtype=np.complex64)
    complex_double_arr = np.array([5 + 6j, 7 + 8j], dtype=np.complex128)
    # Each complex value is serialized as real part then imaginary part
    data = "".join(
        hexlify_value("<f", num.real) + hexlify_value("<f", num.imag)
        for num in complex_single_arr)
    data += "".join(
        hexlify_value("<d", num.real) + hexlify_value("<d", num.imag)
        for num in complex_double_arr)

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'complex_single_channel'",
                             TDS_TYPE_COMPLEX64, 2),
            channel_metadata("/'group'/'complex_double_channel'",
                             TDS_TYPE_COMPLEX128, 2),
        ), data)
    expected_data = {
        ('group', 'complex_single_channel'): complex_single_arr,
        ('group', 'complex_double_channel'): complex_double_arr,
    }
    return test_file, expected_data
コード例 #22
0
def incomplete_last_row_of_interleaved_data():
    """ Test incomplete last row of interleaved data
    """
    # 11 values: an odd count, so the final interleaved row is truncated
    raw_data = "".join("{:02X} 00 00 00".format(i) for i in range(1, 12))
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ),
        raw_data,
        incomplete=True)
    expected_data = {
        ('group', 'channel1'): np.array([1, 3, 5, 7, 9], dtype=np.int32),
        ('group', 'channel2'): np.array([2, 4, 6, 8, 10], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #23
0
def float_data():
    """ Test reading a file with float valued data
    """
    single_arr = np.array([0.123, 0.234, 0.345, 0.456], dtype=np.float32)
    double_arr = np.array([0.987, 0.876, 0.765, 0.654], dtype=np.double)
    # Two values of each channel per chunk, singles before doubles
    data = "".join(
        [hexlify_value("<f", v) for v in single_arr[0:2]] +
        [hexlify_value("<d", v) for v in double_arr[0:2]] +
        [hexlify_value("<f", v) for v in single_arr[2:4]] +
        [hexlify_value("<d", v) for v in double_arr[2:4]])

    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'single_channel'", TDS_TYPE_FLOAT32, 2),
            channel_metadata("/'group'/'double_channel'", TDS_TYPE_FLOAT64, 2),
        ), data)
    expected_data = {
        ('group', 'single_channel'): single_arr,
        ('group', 'double_channel'): double_arr,
    }
    return test_file, expected_data
コード例 #24
0
def incomplete_last_segment():
    """ Test incomplete last segment, eg. if LabView crashed
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ),
        "".join("{:02X} 00 00 00".format(i) for i in range(1, 9)))
    # Final segment has no metadata and was truncated mid-write
    test_file.add_segment(
        ("kTocRawData", ),
        "",
        "".join("{:02X} 00 00 00".format(i) for i in range(9, 13)),
        incomplete=True)
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 5, 6, 9, 10], dtype=np.int32),
        ('group', 'channel2'): np.array([3, 4, 7, 8, 11, 12], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #25
0
def less_data_than_expected_interleaved():
    """ Add interleaved data segment and then a repeated segment without
        any lead in or metadata, so data is read in chunks,
        but the extra chunk does not have as much data as expected.
    """
    # 10 values follow, so the second chunk is shorter than the first
    raw_data = "".join("{:02X} 00 00 00".format(i) for i in range(1, 11))
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 3),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 3),
        ), raw_data)
    expected_data = {
        ('group', 'channel1'): np.array([1, 3, 5, 7, 9], dtype=np.int32),
        ('group', 'channel2'): np.array([2, 4, 6, 8, 10], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #26
0
def chunked_interleaved_segment():
    """ Add interleaved segment and then a repeated segment without
        any lead in or metadata, so data is read in chunks
    """
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList",
         "kTocInterleavedData"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ),
        "".join("{:02X} 00 00 00".format(i) for i in range(1, 9)))
    # Second segment repeats the structure with the value pairs reversed
    test_file.add_segment(
        ("kTocRawData", "kTocInterleavedData"), "",
        "".join("{:02X} 00 00 00".format(i)
                for i in (7, 8, 5, 6, 3, 4, 1, 2)))
    expected_data = {
        ('group', 'channel1'): np.array([1, 3, 5, 7, 7, 5, 3, 1],
                                        dtype=np.int32),
        ('group', 'channel2'): np.array([2, 4, 6, 8, 8, 6, 4, 2],
                                        dtype=np.int32),
    }
    return test_file, expected_data
コード例 #27
0
def chunked_segment():
    """ Add segment and then a repeated segment without
        any lead in or metadata, so data is read in chunks
    """
    # Each chunk holds 2 values per channel: channel1 then channel2
    first_segment_values = (0x00, 0x01, 0x0A, 0x0B, 0x02, 0x03, 0x0C, 0x0D)
    second_segment_values = (0x04, 0x05, 0x0E, 0x0F, 0x06, 0x07, 0x10, 0x11)
    test_file = GeneratedFile()
    test_file.add_segment(
        ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
        segment_objects_metadata(
            channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2),
            channel_metadata("/'group'/'channel2'", TDS_TYPE_INT32, 2),
        ),
        "".join("{:02X} 00 00 00".format(v) for v in first_segment_values))
    test_file.add_segment(
        ("kTocRawData", ), "",
        "".join("{:02X} 00 00 00".format(v) for v in second_segment_values))
    expected_data = {
        ('group', 'channel1'):
        np.array([0, 1, 2, 3, 4, 5, 6, 7], dtype=np.int32),
        ('group', 'channel2'):
        np.array([10, 11, 12, 13, 14, 15, 16, 17], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #28
0
ファイル: test_daqmx.py プロジェクト: xiangnandang/npTDMS
def test_multiple_raw_data_buffers_with_different_widths():
    """ DAQmx with raw data buffers with different widths
    """
    # Buffer 0 holds three i16 scalers, buffer 1 holds two i32 scalers
    scalers = [
        daqmx_scaler_metadata(0, 3, 0, 0),
        daqmx_scaler_metadata(0, 3, 2, 0),
        daqmx_scaler_metadata(0, 3, 4, 0),
        daqmx_scaler_metadata(0, 5, 0, 1),
        daqmx_scaler_metadata(0, 5, 4, 1),
    ]
    metadata = segment_objects_metadata(
        root_metadata(),
        group_metadata(),
        *[daqmx_channel_metadata("Channel{0}".format(index + 1), 4, [6, 8],
                                 [scaler])
          for index, scaler in enumerate(scalers)])
    # 12 i16 values for the first buffer then 8 i32 values for the second
    data = (
        "".join("{:02X} 00".format(v) for v in range(0x01, 0x0D)) +
        "".join("{:02X} 00 00 00".format(v) for v in range(0x0D, 0x15)))

    test_file = GeneratedFile()
    test_file.add_segment(segment_toc(), metadata, data)
    tdms_data = test_file.load()

    expectations = (
        ("Channel1", np.int16, [1, 4, 7, 10]),
        ("Channel2", np.int16, [2, 5, 8, 11]),
        ("Channel3", np.int16, [3, 6, 9, 12]),
        ("Channel4", np.int32, [13, 15, 17, 19]),
        ("Channel5", np.int32, [14, 16, 18, 20]),
    )
    for name, expected_dtype, expected_values in expectations:
        channel_data = tdms_data["Group"][name].raw_data
        assert channel_data.dtype == expected_dtype
        np.testing.assert_array_equal(channel_data, expected_values)
コード例 #29
0
def extra_padding_after_metadata():
    """ Segments where the object metadata is followed by zero padding bytes """
    padding = "00 00 00 00 00 00 00 00"
    test_file = GeneratedFile()
    # Two identical segments, each with padding appended to the metadata
    for raw_data in ("01 00 00 00" "02 00 00 00",
                     "03 00 00 00" "04 00 00 00"):
        test_file.add_segment(
            ("kTocMetaData", "kTocRawData", "kTocNewObjList"),
            segment_objects_metadata(
                channel_metadata("/'group'/'channel1'", TDS_TYPE_INT32, 2), )
            + padding,
            raw_data)
    expected_data = {
        ('group', 'channel1'): np.array([1, 2, 3, 4], dtype=np.int32),
    }
    return test_file, expected_data
コード例 #30
0
def channel_without_data_or_data_type():
    """ A channel object with neither raw data nor a data type in its metadata """
    metadata = segment_objects_metadata(
        channel_metadata_with_no_data("/'group'/'channel1'"), )
    test_file = GeneratedFile()
    test_file.add_segment(("kTocMetaData", "kTocNewObjList"), metadata, "")
    expected_data = {
        ('group', 'channel1'): np.array([], dtype=np.dtype('void')),
    }
    return test_file, expected_data