# Example no. 1
# 0
def test_decode(zmq_io_obj):
    """Round-trip check: a packet encoded with dataserver_message_encode
    decodes back to an equal packet list."""
    chain = 1
    packet = Packet(b'\x01' * Packet.num_bytes)
    packet.chip_key = Key(1, chain, packet.chip_id)
    encoded = dataserver_message_encode([packet])
    assert zmq_io_obj.decode(encoded) == [packet]
def test_record(tmpdir):
    """A recorded packet lands in the 'raw_packet' buffer in encoded form."""
    log = HDF5Logger(directory=str(tmpdir), buffer_length=1)
    log.open()

    log.record([Packet()], timestamp=5.0)
    encoded = HDF5Logger.encode(Packet(), timestamp=5.0)
    assert log._buffer['raw_packet'][0] == encoded
# Example no. 3
# 0
def test_encode(zmq_io_obj):
    """Encoding one keyed packet yields the expected hex-string message."""
    chain = 1
    packet = Packet(b'\x00\x01\x02\x03\x04\x05\x06')
    packet.chip_key = Key(1, chain, 64)
    expected = [b'0x0006050403020100 1']
    assert zmq_io_obj.encode([packet]) == expected
# Example no. 4
# 0
def test_decode(zmq_io_obj):
    """Decoding a dataserver message recovers the original packet."""
    chain = 1
    packet = Packet(b'\x00\x01\x02\x03\x04\x05\x06')
    packet.chip_key = Key(1, chain, 64)
    message = dataserver_message_encode([packet])
    assert zmq_io_obj.decode(message) == [packet]
# Example no. 5
# 0
def test_encode(zmq_io_obj):
    """Encoding an all-0x01 packet yields the hex string; a v1 packet is
    one byte shorter, so its top byte encodes as zero."""
    chain = 1
    packet = Packet(b'\x01' * Packet.num_bytes)
    packet.chip_key = Key(1, chain, packet.chip_id)
    if isinstance(packet, Packet_v1):
        expected = b'0x0001010101010101 1'
    else:
        expected = b'0x0101010101010101 1'
    assert zmq_io_obj.encode([packet]) == [expected]
def test_enable(tmpdir):
    """record() is a no-op until the logger is enabled."""
    log = HDF5Logger(directory=str(tmpdir))
    assert not log.is_enabled()
    log.record([Packet()])
    assert len(log._buffer['packets']) == 0
    log.enable()
    assert log.is_enabled()
    log.record([Packet()])
    assert len(log._buffer['packets']) == 1
def config_read_packet():
    """Build a CONFIG_READ packet fixture with fixed chip, register and key."""
    packet = Packet()
    packet.packet_type = Packet.CONFIG_READ_PACKET
    packet.chipid = 123
    packet.register_address = 10
    packet.register_data = 23
    packet.assign_parity()
    packet.chip_key = Key('1-2-123')
    packet.direction = 1
    return packet
def test_flush(tmpdir):
    """flush() empties the buffer; filling it past buffer_length self-flushes."""
    log = HDF5Logger(directory=str(tmpdir), buffer_length=5, enabled=True)
    log.record([Packet()])
    assert len(log._buffer['packets']) == 1
    log.flush()
    assert len(log._buffer['packets']) == 0
    log.record([Packet()] * 5)
    assert len(log._buffer['packets']) == 5
    log.record([Packet()])
    assert len(log._buffer['packets']) == 0
# Example no. 9
# 0
def test_encode(multizmq_io_obj):
    """Encoding a packet addressed via io_group/io_channel yields the
    expected hex-string message.

    Fix: removed the unused local ``chip_id`` (the packet is addressed
    directly through ``io_group``/``io_channel`` here, not via a key).
    """
    io_chain = 1
    io_group = list(multizmq_io_obj._io_group_table)[0]
    test_packet = Packet(b'\x00\x01\x02\x03\x04\x05\x06')
    test_packet.io_group = io_group
    test_packet.io_channel = io_chain
    expected = [b'0x0006050403020100 1']
    assert multizmq_io_obj.encode([test_packet]) == expected
# Example no. 10
# 0
def test_decode(multizmq_io_obj):
    """Decoding a dataserver message for a known address recovers the packet.

    Fix: removed the unused local ``chip_id`` (the packet is addressed
    through ``io_group``/``io_channel``, not via a key).
    """
    io_chain = 1
    io_group = list(multizmq_io_obj._io_group_table)[0]
    address = str(multizmq_io_obj._io_group_table[io_group])
    test_packet = Packet(b'\x00\x01\x02\x03\x04\x05\x06')
    test_packet.io_group = io_group
    test_packet.io_channel = io_chain
    test_bytes = dataserver_message_encode([test_packet])
    expected = [test_packet]
    assert multizmq_io_obj.decode(test_bytes, address=address) == expected
# Example no. 11
# 0
def test_flush(tmpdir):
    """flush() empties the raw_packet buffer; an overfull buffer self-flushes."""
    log = HDF5Logger(directory=str(tmpdir), buffer_length=5)
    log.open()

    log.record([Packet()])
    assert len(log._buffer['raw_packet']) == 1
    log.flush()
    assert len(log._buffer['raw_packet']) == 0
    log.record([Packet()] * 5)
    assert len(log._buffer['raw_packet']) == 5
    log.record([Packet()])
    assert len(log._buffer['raw_packet']) == 0
# Example no. 12
# 0
def test_encode(multizmq_io_obj):
    """Encoding an all-0x01 packet yields the hex string; for v1 packets
    the top byte encodes as zero.

    Fixes: removed the unused local ``chip_id``; compare classes with
    ``is`` (identity) rather than ``==``.
    """
    io_chain = 1
    io_group = list(multizmq_io_obj._io_group_table)[0]
    test_packet = Packet(b'\x01' * Packet.num_bytes)
    test_packet.io_group = io_group
    test_packet.io_channel = io_chain
    test_bytes = b'0x0101010101010101 1'
    if Packet is Packet_v1:
        test_bytes = b'0x0001010101010101 1'
    expected = [test_bytes]
    assert multizmq_io_obj.encode([test_packet]) == expected
def test_flush_deprecated_3_0_0(tmpdir):
    """Deprecated open() warns; flush and auto-flush still empty the buffer."""
    log = HDF5Logger(directory=str(tmpdir), buffer_length=5)
    with pytest.warns(DeprecationWarning):
        log.open()

    log.record([Packet()])
    assert len(log._buffer['packets']) == 1
    log.flush()
    assert len(log._buffer['packets']) == 0
    log.record([Packet()] * 5)
    assert len(log._buffer['packets']) == 5
    log.record([Packet()])
    assert len(log._buffer['packets']) == 0
# Example no. 14
# 0
def dataserver_message_decode(msgs, version=(1, 0), **kwargs):
    r'''
    Convert a list of larpix data server messages into packets. Additional packet meta data can be passed along via kwargs E.g.::

        msg = b'\x01\x00D\x01\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
        packets = dataserver_message_decode([msg], io_group=1)
        packets[0] # Packet(b'\x04\x00\x00\x00\x00\x00\x00'), key of '1-1-1'

    :param msgs: list of bytestream messages each starting with a single 8-byte header word, followed by N 8-byte data words

    :param version: optional, message version to validate against, ``tuple`` of major, minor version numbers

    :returns: list of ``larpix.Packet`` and ``larpix.TimestampPacket`` objects

    '''
    packets = []
    for msg in msgs:
        # Header bytes 0-1 carry the (major, minor) message version.
        major, minor = struct.unpack('BB', msg[:2])
        if (major, minor) != version:
            warnings.warn(
                'Message version mismatch! Expected {}, received {}'.format(
                    version, (major, minor)))
        msg_type = struct.unpack('c', msg[2:3])[0]
        if msg_type == b'T':
            # 7 timestamp bytes, zero-padded to 8 and unpacked as an explicit
            # little-endian uint64 ('<Q').  The previous native 'L' format is
            # only 4 bytes on some platforms and raised struct.error there.
            timestamp = struct.unpack('<Q', msg[8:15] + b'\x00')[0]
            packets.append(TimestampPacket(timestamp=timestamp))
            for key, value in kwargs.items():
                setattr(packets[-1], key, value)
        elif msg_type == b'D':
            io_chain = struct.unpack('B', msg[3:4])[0]
            payload = msg[8:]
            # Only well-formed payloads (whole 8-byte words) are decoded.
            if len(payload) % 8 == 0:
                for start_index in range(0, len(payload), 8):
                    packet_bytes = payload[start_index:start_index + 8]
                    if Packet == Packet_v1:
                        # v1 packets are 7 bytes; drop the padding byte.
                        packets.append(Packet(packet_bytes[:-1]))
                    elif Packet == Packet_v2:
                        packets.append(Packet(packet_bytes))
                    packets[-1].io_channel = io_chain
                    for key, value in kwargs.items():
                        setattr(packets[-1], key, value)
        elif msg_type == b'H':
            print('Heartbeat message: {}'.format(msg[3:]))
    return packets
# Example no. 15
# 0
def test_enable(tmpdir):
    """After enable(), recorded packets are buffered under 'raw_packet'."""
    log = HDF5Logger(directory=str(tmpdir), buffer_length=1)
    log.open()

    log.enable()
    assert log.is_enabled()
    log.record([Packet()])
    assert len(log._buffer['raw_packet']) == 1
def test_controller_read_capture(tmpdir):
    """Packets read during Controller.run are captured by the attached logger."""
    ctl = Controller()
    ctl.io = FakeIO()
    ctl.io.queue.append(([Packet()], b'\x00\x00'))
    ctl.logger = HDF5Logger(directory=str(tmpdir), buffer_length=1)
    ctl.logger.enable()
    ctl.run(0.1, 'test')
    assert len(ctl.logger._buffer['packets']) == 1
# Example no. 17
# 0
def test_disable(tmpdir):
    """A disabled logger ignores record() calls."""
    log = HDF5Logger(directory=str(tmpdir))
    log.open()

    log.disable()
    assert not log.is_enabled()
    log.record([Packet()])
    assert len(log._buffer['raw_packet']) == 0
# Example no. 18
# 0
def test_controller_read_capture(capfd):
    """A StdoutLogger attached to the controller buffers one record per read."""
    ctl = Controller()
    ctl.io = FakeIO()
    ctl.io.queue.append(([Packet()], b'\x00\x00'))
    ctl.logger = StdoutLogger(buffer_length=100)
    ctl.logger.enable()
    ctl.run(0.1, 'test')
    assert len(ctl.logger._buffer) == 1
def dataserver_message_decode(msgs,
                              key_generator=None,
                              version=(1, 0),
                              **kwargs):
    r'''
    Convert a list of larpix data server messages into packets. A key generator
    should be provided if packets are to be used with an ``larpix.io.IO``
    object. The data server messages provide a ``chip_id`` and ``io_chain`` for
    keys. Additional keyword arguments can be passed along to the key generator. E.g.::

        from larpix.larpix import Key
        def ex_key_gen(chip_id, io_chain, io_group):
            return Key(Key.key_format.format(
                chip_id=chip_id,
                io_channel=io_chain,
                io_group=io_group
            ))

        msg = b'\x01\x00D\x01\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00'
        packets = dataserver_message_decode([msg], key_generator=ex_key_gen, io_group=1)
        packets[0] # Packet(b'\x04\x00\x00\x00\x00\x00\x00'), key of '1-1-1'

    :param msgs: list of bytestream messages each starting with a single 8-byte header word, followed by N 8-byte data words

    :param key_generator: optional, a method that takes ``chip_id`` and ``io_chain`` as arguments and returns a ``larpix.Key`` object

    :param version: optional, message version to validate against, ``tuple`` of major, minor version numbers

    :returns: list of ``larpix.Packet`` and ``larpix.TimestampPacket`` objects

    '''
    packets = []
    for msg in msgs:
        # Header bytes 0-1 carry the (major, minor) message version.
        major, minor = struct.unpack('BB', msg[:2])
        if (major, minor) != version:
            warnings.warn(
                'Message version mismatch! Expected {}, received {}'.format(
                    version, (major, minor)))
        msg_type = struct.unpack('c', msg[2:3])[0]
        if msg_type == b'T':
            # 7 timestamp bytes, zero-padded to 8 and unpacked as an explicit
            # little-endian uint64 ('<Q').  The previous native 'L' format is
            # only 4 bytes on some platforms and raised struct.error there.
            timestamp = struct.unpack('<Q', msg[8:15] + b'\x00')[0]
            packets.append(TimestampPacket(timestamp=timestamp))
        elif msg_type == b'D':
            io_chain = struct.unpack('B', msg[3:4])[0]
            payload = msg[8:]
            # Only well-formed payloads (whole 8-byte words) are decoded.
            if len(payload) % 8 == 0:
                for start_index in range(0, len(payload), 8):
                    # 7 packet bytes per 8-byte word; the 8th is padding.
                    packet_bytes = payload[start_index:start_index + 7]
                    packets.append(Packet(packet_bytes))
                    if key_generator:
                        packets[-1].chip_key = key_generator(
                            chip_id=packets[-1].chipid,
                            io_chain=io_chain,
                            **kwargs)
        elif msg_type == b'H':
            print('Heartbeat message: {}'.format(msg[3:]))
    return packets
def test_record_deprecated_3_0_0(tmpdir):
    """Deprecated open() warns; regular and timestamp packets both buffer."""
    log = HDF5Logger(directory=str(tmpdir))
    with pytest.warns(DeprecationWarning):
        log.open()

    log.record([Packet()])
    assert len(log._buffer['packets']) == 1
    log.record([TimestampPacket(timestamp=123)])
    assert len(log._buffer['packets']) == 2
def test_controller_read_capture_deprecated_3_0_0(tmpdir):
    """Deprecated logger.open() warns, but capture during run still works."""
    ctl = Controller()
    ctl.io = FakeIO()
    ctl.io.queue.append(([Packet()], b'\x00\x00'))
    ctl.logger = HDF5Logger(directory=str(tmpdir), buffer_length=1)
    with pytest.warns(DeprecationWarning):
        ctl.logger.open()
    ctl.run(0.1, 'test')
    assert len(ctl.logger._buffer['packets']) == 1
def test_controller_read_capture(capfd):
    """Deprecated StdoutLogger.open() warns; one record is buffered per read."""
    ctl = Controller()
    ctl.io = FakeIO()
    ctl.io.queue.append(([Packet()], b'\x00\x00'))
    ctl.logger = StdoutLogger(buffer_length=100)
    with pytest.warns(DeprecationWarning):
        ctl.logger.open()
    ctl.run(0.1, 'test')
    assert len(ctl.logger._buffer) == 1
def test_disable_deprecated_3_0_0(tmpdir):
    """Deprecated open() warns; a disabled logger drops recorded packets."""
    log = HDF5Logger(directory=str(tmpdir))
    with pytest.warns(DeprecationWarning):
        log.open()

    log.disable()
    assert not log.is_enabled()
    log.record([Packet()])
    assert len(log._buffer['packets']) == 0
def test_to_file_v1_0_config_read_packet(tmpfile, config_read_packet):
    """A config-read packet written with to_file v1.0 round-trips via h5py."""
    to_file(tmpfile, [config_read_packet], version='1.0')
    f = h5py.File(tmpfile, 'r')
    assert len(f['packets']) == 1
    row = f['packets'][0]
    props = dtype_property_index_lookup['1.0']['packets']
    # Rebuild a packet field-by-field from the stored row and compare.
    restored = Packet()
    restored.chip_key = row[props['chip_key']]
    restored.packet_type = row[props['type']]
    restored.chipid = row[props['chipid']]
    restored.parity_bit_value = row[props['parity']]
    restored.register_address = row[props['register']]
    restored.register_data = row[props['value']]
    restored.direction = row[props['direction']]
    assert restored == config_read_packet
def test_min_example(capsys):
    """Minimal end-to-end example: configure a chip, then run and read a
    data packet, asserting the exact console output at each step.

    NOTE(review): the asserted strings depend on the exact sequence of
    writes/prints, so statement order must not change.
    """
    from larpix.larpix import Controller, Packet
    from larpix.io.fakeio import FakeIO
    from larpix.logger.stdout_logger import StdoutLogger
    controller = Controller()
    controller.io = FakeIO()
    # buffer_length=0: every record is printed immediately.
    controller.logger = StdoutLogger(buffer_length=0)
    controller.logger.enable()
    chip1 = controller.add_chip('1-1-1')  # (access key)
    chip1.config.global_threshold = 25
    controller.write_configuration('1-1-1', 25)  # chip key, register 25
    # Both FakeIO (echo) and the logger print the written packet.
    assert capsys.readouterr(
    ).out == '[ Chip key: 1-1-1 | Chip: 1 | Config write | Register: 25 | Value:  16 | Parity: 1 (valid: True) ]\nRecord: [ Chip key: 1-1-1 | Chip: 1 | Config write | Register: 25 | Value:  16 | Parity: 1 (valid: True) ]\n'
    # Queue a pretend data packet for the next read.
    packet = Packet(b'\x04\x14\x80\xc4\x03\xf2 ')
    packet_bytes = packet.bytes()
    pretend_input = ([packet], packet_bytes)
    controller.io.queue.append(pretend_input)
    controller.run(0.05, 'test run')
    print(controller.reads[0])
    # Logger record line first, then the explicit print of the read.
    assert capsys.readouterr(
    ).out == 'Record: [ Chip key: None | Chip: 1 | Data | Channel: 5 | Timestamp: 123456 | ADC data: 120 | FIFO Half: False | FIFO Full: False | Parity: 1 (valid: True) ]\n[ Chip key: None | Chip: 1 | Data | Channel: 5 | Timestamp: 123456 | ADC data: 120 | FIFO Half: False | FIFO Full: False | Parity: 1 (valid: True) ]\n'
# Example no. 26
# 0
    def decode(cls, msgs, io_chain=0, address=None, **kwargs):
        '''
        Translate ZMQ messages into a list of ``Packet`` objects.

        Each message is split into consecutive 8-byte words and the first
        7 bytes of every word become one packet; messages whose length is
        not a multiple of 8 are skipped.  Each packet receives a chip key
        generated from its chip id, the io_chain, and the address.
        '''
        decoded = []
        for msg in msgs:
            if len(msg) % 8 != 0:
                continue
            for offset in range(0, len(msg), 8):
                pkt = Packet(msg[offset:offset + 7])
                pkt.chip_key = cls.generate_chip_key(
                    chip_id=pkt.chipid,
                    io_chain=io_chain,
                    address=str(address))
                decoded.append(pkt)
        return decoded
# Example no. 27
# 0
 def _parse_input(bytestream):
     """Extract ``Packet`` objects from a framed serial bytestream.

     Frames are ``fpga_packet_size`` bytes long, delimited by
     ``start_byte`` ... ``stop_byte``; bytes 1-7 of a valid frame carry the
     packet payload.  Garbage between frames is skipped by scanning forward
     to the next start byte.

     Fixes: removed a dead commented-out parsing block (a no-op string
     literal) and the unused locals ``metadata_byte_index``, ``data_bytes``
     and ``skip_slices``.
     """
     packet_size = SerialPort.fpga_packet_size
     start_byte = SerialPort.start_byte[0]
     stop_byte = SerialPort.stop_byte[0]
     byte_packets = []
     bytestream_len = len(bytestream)
     last_possible_start = bytestream_len - packet_size
     index = 0
     while index <= last_possible_start:
         if (bytestream[index] == start_byte
                 and bytestream[index + packet_size - 1] == stop_byte):
             # Bytes 1-7 of the frame are the packet payload.
             byte_packets.append(Packet(bytestream[index + 1:index + 8]))
             index += packet_size
         else:
             # Throw out everything between here and the next start byte.
             # Note: start searching after byte 0 in case it's
             # already a start byte
             index = bytestream.find(start_byte, index + 1)
             if index == -1:
                 index = bytestream_len
     return byte_packets
def test_record(tmpdir):
    """An enabled logger buffers regular and timestamp packets alike."""
    log = HDF5Logger(directory=str(tmpdir), enabled=True)
    log.record([Packet()])
    assert len(log._buffer['packets']) == 1
    log.record([TimestampPacket(timestamp=123)])
    assert len(log._buffer['packets']) == 2
def test_tutorial(capsys, tmpdir, temp_logfilename):
    """Exercise the documented tutorial flow end to end: keys, configuration
    writes/reads, reading data through FakeIO, PacketCollection access, and
    HDF5 logging.

    NOTE(review): the FakeIO queue and read/run calls are consumed in
    order, so statement order must not change.
    """
    from larpix.larpix import Controller, Packet

    from larpix.io.fakeio import FakeIO
    from larpix.logger.stdout_logger import StdoutLogger
    controller = Controller()
    controller.io = FakeIO()
    controller.logger = StdoutLogger(buffer_length=0)
    controller.logger.enable()

    # --- Chip bookkeeping via string keys ---
    chip_key = '1-1-5'
    chip5 = controller.add_chip(chip_key)
    chip5 = controller.get_chip(chip_key)

    # --- Key anatomy: io_group - io_channel - chip_id ---
    from larpix.larpix import Key
    example_key = Key('1-2-3')

    assert example_key.io_group == 1
    assert example_key.io_channel == 2
    assert example_key.chip_id == 3
    example_key.to_dict()

    # --- Configuration registers at three granularities ---
    chip5.config.global_threshold = 35  # entire register = 1 number
    chip5.config.periodic_reset = 1  # one bit as part of a register
    chip5.config.channel_mask[20] = 1  # one bit per channel

    controller.write_configuration(chip_key)  # send all registers
    controller.write_configuration(chip_key, 32)  # send only register 32
    controller.write_configuration(chip_key,
                                   [32, 50])  # send registers 32 and 50

    global_threshold_reg = chip5.config.global_threshold_address

    # --- Reading configuration back (FakeIO supplies the response) ---
    packets = chip5.get_configuration_packets(Packet.CONFIG_READ_PACKET)
    bytestream = b'bytes for the config read packets'
    controller.io.queue.append((packets, bytestream))

    controller.read_configuration(chip_key)

    # --- Manual listen/read cycle over two queued bursts ---
    packets = [Packet()] * 40
    bytestream = b'bytes from the first set of packets'
    controller.io.queue.append((packets, bytestream))
    packets2 = [Packet()] * 30
    bytestream2 = b'bytes from the second set of packets'
    controller.io.queue.append((packets2, bytestream2))

    controller.start_listening()
    # Data arrives...
    packets, bytestream = controller.read()
    # More data arrives...
    packets2, bytestream2 = controller.read()
    controller.stop_listening()
    message = 'First data arrived!'
    message2 = 'More data arrived!'
    controller.store_packets(packets, bytestream, message)
    controller.store_packets(packets, bytestream2, message2)

    # --- Timed run draining a pre-filled queue ---
    packets = [Packet()] * 5
    bytestream = b'[bytes from read #%d] '
    for i in range(100):
        controller.io.queue.append((packets, bytestream % i))

    duration = 0.1  # seconds
    message = '10-second data run'
    controller.run(duration, message)

    # --- PacketCollection indexing and slicing ---
    run1 = controller.reads[0]
    first_packet = run1[0]  # Packet object
    first_ten_packets = run1[0:10]  # smaller PacketCollection object

    first_packet_bits = run1[0,
                             'bits']  # string representation of bits in packet
    first_ten_packet_bits = run1[0:10, 'bits']  # list of strings

    print(run1)  # prints the contents of the packets
    print(run1[10:30])  # prints 20 packets from the middle of the run

    # --- Packet attribute tour ---
    packet = run1[0]
    # all packets
    packet.packet_type  # unique in that it gives the bits representation
    packet.chipid  # all other properties return Python numbers
    packet.chip_key  # key for association to a unique chip
    packet.parity_bit_value
    # data packets
    packet.channel_id
    packet.dataword
    packet.timestamp
    assert packet.fifo_half_flag in (1, 0)
    assert packet.fifo_full_flag in (1, 0)
    # config packets
    packet.register_address
    packet.register_data
    # test packets
    packet.test_counter

    # --- HDF5 logging: enable, flush, disable ---
    from larpix.logger.h5_logger import HDF5Logger
    controller.logger = HDF5Logger(
        filename=temp_logfilename, directory=str(tmpdir), buffer_length=10000
    )  # a filename of None uses the default filename formatting
    controller.logger.enable(
    )  # opens hdf5 file and starts tracking all communications

    controller.logger = HDF5Logger(filename=temp_logfilename,
                                   directory=str(tmpdir),
                                   enabled=True)

    controller.verify_configuration()
    controller.logger.flush()

    controller.logger.disable()  # stop tracking
    # any communication here is ignored
    controller.logger.enable()  # start tracking again
    controller.logger.is_enabled()  # returns True if tracking

    controller.logger.disable()

    # --- Inspect the produced HDF5 file directly ---
    import h5py
    datafile = h5py.File(tmpdir.join(temp_logfilename))

    assert '_header' in datafile.keys()
    assert 'packets' in datafile.keys()
    assert 'messages' in datafile.keys()
    assert list(
        datafile['_header'].attrs) == ['created', 'modified', 'version']

    raw_value = datafile['packets'][
        0]  # e.g. (b'0-246', 3, 246, 1, 1, -1, -1, -1, -1, -1, -1, 0, 0)
    raw_values = datafile['packets'][-100:]  # last 100 packets in file

    packet_repr = raw_values[0:1]
    packet_repr['chip_key']  # chip key for packet, e.g. b'1-1-246'
    packet_repr['adc_counts']  # list of ADC values for each packet
    packet_repr.dtype  # description of data type (with names of each column)

    datafile.close()
# Example no. 30
# 0
def test_decode():
    """A single 8-byte message decodes to one packet built from its first 7 bytes."""
    raw = b'\x00\x06\x05\x04\x03\x02\x01\x00'
    assert ZMQ_IO.decode([raw]) == [Packet(raw[:-1])]