Example #1
    def generate_chip_key(self, **kwargs):
        '''
        Generates a valid ``MultiZMQ_IO`` chip key

        :param chip_id: ``int`` corresponding to internal chip id

        :param io_chain: ``int`` corresponding to daisy chain number

        :param address: ``str`` corresponding to the address of the DAQ board

        '''
        req_fields = ('chip_id', 'io_chain', 'address')
        if not all([key in kwargs for key in req_fields]):
            raise ValueError('Missing fields required to generate chip key'
                             ', requires {}, received {}'.format(
                                 req_fields, kwargs.keys()))
        if not isinstance(kwargs['chip_id'], int):
            raise ValueError('chip_id must be int')
        if not isinstance(kwargs['io_chain'], int):
            raise ValueError('io_chain must be int')
        if not isinstance(kwargs['address'], str):
            raise ValueError('address must be str')
        if kwargs['address'] not in self._io_group_table.inv:
            raise KeyError('no known io group for {}'.format(
                kwargs['address']))
        io_channel = kwargs['io_chain']
        if io_channel in self._miso_map:
            io_channel = self._miso_map[io_channel]
        return Key.from_dict(
            dict(io_group=self._io_group_table.inv[kwargs['address']],
                 io_channel=io_channel,
                 chip_id=kwargs['chip_id']))
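A short usage sketch for the method above (not taken from the library's tests): the address string and its mapping to io group 1 are illustrative assumptions about how the ``MultiZMQ_IO`` instance was configured.

# Hypothetical usage: `io` is a configured MultiZMQ_IO instance whose
# _io_group_table maps 'tcp://10.0.1.6' to io group 1 (assumed values).
key = io.generate_chip_key(chip_id=125, io_chain=2, address='tcp://10.0.1.6')
assert key.io_group == 1
assert key.io_channel == 2  # assumes io_chain 2 is not remapped by _miso_map
assert key.chip_id == 125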
Example #2
def test_generate_chip_key(zmq_io_obj):
    chip_id = 125
    io_chain = 2
    io_group = 1
    expected = Key('{}-{}-{}'.format(io_group, io_chain, chip_id))
    assert zmq_io_obj.generate_chip_key(chip_id=chip_id,
                                        io_chain=io_chain) == expected
Example #3
def test_decode(zmq_io_obj):
    io_chain = 1
    test_packet = Packet(b'\x01' * Packet.num_bytes)
    test_packet.chip_key = Key(1, io_chain, test_packet.chip_id)
    test_bytes = dataserver_message_encode([test_packet])
    expected = [test_packet]
    assert zmq_io_obj.decode(test_bytes) == expected
Example #4
def test_decode(zmq_io_obj):
    chip_id = 64
    io_chain = 1
    test_packet = Packet(b'\x00\x01\x02\x03\x04\x05\x06')
    test_packet.chip_key = Key(1, io_chain, chip_id)
    test_bytes = dataserver_message_encode([test_packet])
    expected = [test_packet]
    assert zmq_io_obj.decode(test_bytes) == expected
Example #5
def test_encode(zmq_io_obj):
    chip_id = 64
    io_chain = 1
    test_packet = Packet(b'\x00\x01\x02\x03\x04\x05\x06')
    test_packet.chip_key = Key(1, io_chain, chip_id)
    test_bytes = b'0x0006050403020100 1'
    expected = [test_bytes]
    assert zmq_io_obj.encode([test_packet]) == expected
Example #6
def test_encode(zmq_io_obj):
    io_chain = 1
    test_packet = Packet(b'\x01' * Packet.num_bytes)
    test_packet.chip_key = Key(1, io_chain, test_packet.chip_id)
    test_bytes = b'0x0101010101010101 1'
    if isinstance(test_packet, Packet_v1):
        test_bytes = b'0x0001010101010101 1'
    expected = [test_bytes]
    assert zmq_io_obj.encode([test_packet]) == expected
Example #7
def config_read_packet_v2():
    p = Packet_v2()
    p.packet_type = Packet_v2.CONFIG_READ_PACKET
    p.chip_id = 123
    p.register_address = 10
    p.register_data = 23
    p.assign_parity()
    p.chip_key = Key('1-2-123')
    p.direction = 1
    return p
Example #8
def test_parse_chip_key(zmq_io_obj):
    chip_id = 62
    io_chain = 2
    io_group = 1
    expected = {
        'chip_id': chip_id,
        'io_chain': io_chain,
        'address': zmq_io_obj._address
    }
    key = Key('{}-{}-{}'.format(io_group, io_chain, chip_id))
    assert zmq_io_obj.parse_chip_key(key) == expected
Example #9
def data_packet():
    p = Packet_v1()
    p.packet_type = Packet_v1.DATA_PACKET
    p.chipid = 123
    p.channel = 7
    p.timestamp = 123456
    p.dataword = 120
    p.fifo_half_flag = 1
    p.assign_parity()
    p.chip_key = Key('1-2-123')
    p.direction = 1
    return p
Example #10
def _parse_configs_v2_4(row, asic_version, *args, **kwargs):
    key = Key(row['io_group'],row['io_channel'],row['chip_id'])
    if asic_version in ('1','2'):
        c = Chip(key,version=int(asic_version))
    else:
        c = Chip(key,version=asic_version)
    d = dict()
    for i in range(c.config.num_registers):
        d[i] = row['registers'][i]
    endian = 'big' if asic_version == '1' else 'little'
    c.config.from_dict_registers(d, endian=endian)
    return c
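For reference, a minimal sketch (not from the library) of the kind of row this parser expects; the field values are illustrative, and a throwaway Chip is used to size the register list rather than hard-coding a count.

# Hypothetical input row: field names follow the lookups in _parse_configs_v2_4.
n_regs = Chip(Key(1, 2, 123), version=2).config.num_registers
row = {
    'io_group': 1,
    'io_channel': 2,
    'chip_id': 123,
    'registers': [0] * n_regs,  # one value per configuration register
}
chip = _parse_configs_v2_4(row, '2')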
Example #11
def data_packet_v2():
    p = Packet_v2()
    p.packet_type = Packet_v2.DATA_PACKET
    p.chip_id = 123
    p.channel_id = 7
    p.timestamp = 123456
    p.dataword = 120
    p.shared_fifo = 1
    p.assign_parity()
    p.chip_key = Key('1-2-123')
    p.direction = 1
    p.receipt_timestamp = 123456
    return p
Example #12
def fifo_diagnostics_packet_v2():
    p = Packet_v2()
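    # Enable per-packet fifo diagnostics so the fifo event counters set below
    # are carried in the packet.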
    p.enable_fifo_diagnostics = True
    p.packet_type = Packet_v2.DATA_PACKET
    p.chip_id = 123
    p.channel_id = 7
    p.timestamp = 123456
    p.dataword = 120
    p.shared_fifo = 1
    p.shared_fifo_events = 5
    p.local_fifo_events = 1
    p.assign_parity()
    p.chip_key = Key('1-2-123')
    p.direction = 1
    return p
Example #13
def test_to_file_v2_0_config_read_packet(tmpfile, config_read_packet_v2):
    to_file(tmpfile, [config_read_packet_v2], version='2.0')
    f = h5py.File(tmpfile, 'r')
    assert len(f['packets']) == 1
    row = f['packets'][0]
    props = dtype_property_index_lookup['2.0']['packets']
    new_packet = Packet_v2()
    new_packet.chip_key = Key(row[props['io_group']], row[props['io_channel']],
                              row[props['chip_id']])
    new_packet.packet_type = row[props['packet_type']]
    new_packet.parity = row[props['parity']]
    new_packet.register_address = row[props['register_address']]
    new_packet.register_data = row[props['register_data']]
    new_packet.direction = row[props['direction']]
    print(new_packet)
    print(config_read_packet_v2)
    assert new_packet == config_read_packet_v2
Example #14
def test_to_file_v2_0_data_packet(tmpfile, data_packet_v2):
    to_file(tmpfile, [data_packet_v2], version='2.0')
    f = h5py.File(tmpfile, 'r')
    assert len(f['packets']) == 1
    row = f['packets'][0]
    props = dtype_property_index_lookup['2.0']['packets']
    new_packet = Packet_v2()
    new_packet.chip_key = Key(row[props['io_group']], row[props['io_channel']],
                              row[props['chip_id']])
    new_packet.packet_type = row[props['packet_type']]
    new_packet.trigger_type = row[props['trigger_type']]
    new_packet.chip_id = row[props['chip_id']]
    new_packet.parity = row[props['parity']]
    new_packet.channel_id = row[props['channel_id']]
    new_packet.timestamp = row[props['timestamp']]
    new_packet.dataword = row[props['dataword']]
    new_packet.local_fifo = row[props['local_fifo']]
    new_packet.shared_fifo = row[props['shared_fifo']]
    new_packet.direction = row[props['direction']]
    assert new_packet.timestamp == data_packet_v2.timestamp
    assert new_packet == data_packet_v2
Example #15
    def generate_chip_key(self, **kwargs):
        '''
        Generates a valid ``ZMQ_IO`` chip key

        :param chip_id: ``int`` corresponding to internal chip id

        :param io_chain: ``int`` corresponding to daisy chain number

        '''
        req_fields = ('chip_id', 'io_chain')
        if not all([key in kwargs for key in req_fields]):
            raise ValueError('Missing fields required to generate chip key'
                ', requires {}, received {}'.format(req_fields, kwargs.keys()))
        io_channel = kwargs['io_chain']
        if io_channel in self._miso_map:
            io_channel = self._miso_map[io_channel]
        return Key.from_dict(dict(
                io_channel=io_channel,
                chip_id=kwargs['chip_id'],
                io_group=self._io_group_table.inv[self._address]
            ))
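A hedged round-trip sketch (not from the library's tests) tying this method to parse_chip_key from Example #8; `io` stands in for a configured ``ZMQ_IO`` instance, and the identity channel map is an assumption.

# Hypothetical round trip: parse_chip_key (Example #8) is expected to invert
# generate_chip_key when _miso_map leaves io_chain 2 unchanged.
key = io.generate_chip_key(chip_id=62, io_chain=2)
fields = io.parse_chip_key(key)
assert fields['chip_id'] == 62
assert fields['io_chain'] == 2
assert fields['address'] == io._address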
Example #16
def test_tutorial(capsys, tmpdir, temp_logfilename):
    from larpix.larpix import Controller, Packet

    from larpix.io.fakeio import FakeIO
    from larpix.logger.stdout_logger import StdoutLogger
    controller = Controller()
    controller.io = FakeIO()
    controller.logger = StdoutLogger(buffer_length=0)
    controller.logger.enable()

    chip_key = '1-1-5'
    chip5 = controller.add_chip(chip_key)
    chip5 = controller.get_chip(chip_key)

    from larpix.larpix import Key
    example_key = Key('1-2-3')

    assert example_key.io_group == 1
    assert example_key.io_channel == 2
    assert example_key.chip_id == 3
    example_key.to_dict()

    chip5.config.global_threshold = 35  # entire register = 1 number
    chip5.config.periodic_reset = 1  # one bit as part of a register
    chip5.config.channel_mask[20] = 1  # one bit per channel

    controller.write_configuration(chip_key)  # send all registers
    controller.write_configuration(chip_key, 32)  # send only register 32
    controller.write_configuration(chip_key,
                                   [32, 50])  # send registers 32 and 50

    global_threshold_reg = chip5.config.global_threshold_address

    packets = chip5.get_configuration_packets(Packet.CONFIG_READ_PACKET)
    bytestream = b'bytes for the config read packets'
    controller.io.queue.append((packets, bytestream))

    controller.read_configuration(chip_key)

    packets = [Packet()] * 40
    bytestream = b'bytes from the first set of packets'
    controller.io.queue.append((packets, bytestream))
    packets2 = [Packet()] * 30
    bytestream2 = b'bytes from the second set of packets'
    controller.io.queue.append((packets2, bytestream2))

    controller.start_listening()
    # Data arrives...
    packets, bytestream = controller.read()
    # More data arrives...
    packets2, bytestream2 = controller.read()
    controller.stop_listening()
    message = 'First data arrived!'
    message2 = 'More data arrived!'
    controller.store_packets(packets, bytestream, message)
    controller.store_packets(packets2, bytestream2, message2)

    packets = [Packet()] * 5
    bytestream = b'[bytes from read #%d] '
    for i in range(100):
        controller.io.queue.append((packets, bytestream % i))

    duration = 0.1  # seconds (shortened from the tutorial's 10-second run)
    message = '10-second data run'
    controller.run(duration, message)

    run1 = controller.reads[0]
    first_packet = run1[0]  # Packet object
    first_ten_packets = run1[0:10]  # smaller PacketCollection object

    first_packet_bits = run1[0, 'bits']  # string representation of bits in packet
    first_ten_packet_bits = run1[0:10, 'bits']  # list of strings

    print(run1)  # prints the contents of the packets
    print(run1[10:30])  # prints 20 packets from the middle of the run

    packet = run1[0]
    # all packets
    packet.packet_type  # unique in that it gives the bits representation
    packet.chipid  # all other properties return Python numbers
    packet.chip_key  # key for association to a unique chip
    packet.parity_bit_value
    # data packets
    packet.channel_id
    packet.dataword
    packet.timestamp
    assert packet.fifo_half_flag in (1, 0)
    assert packet.fifo_full_flag in (1, 0)
    # config packets
    packet.register_address
    packet.register_data
    # test packets
    packet.test_counter

    from larpix.logger.h5_logger import HDF5Logger
    controller.logger = HDF5Logger(
        filename=temp_logfilename, directory=str(tmpdir), buffer_length=10000
    )  # a filename of None uses the default filename formatting
    controller.logger.enable()  # opens hdf5 file and starts tracking all communications

    controller.logger = HDF5Logger(filename=temp_logfilename,
                                   directory=str(tmpdir),
                                   enabled=True)

    controller.verify_configuration()
    controller.logger.flush()

    controller.logger.disable()  # stop tracking
    # any communication here is ignored
    controller.logger.enable()  # start tracking again
    controller.logger.is_enabled()  # returns True if tracking

    controller.logger.disable()

    import h5py
    datafile = h5py.File(tmpdir.join(temp_logfilename), 'r')

    assert '_header' in datafile.keys()
    assert 'packets' in datafile.keys()
    assert 'messages' in datafile.keys()
    assert list(
        datafile['_header'].attrs) == ['created', 'modified', 'version']

    # e.g. (b'0-246', 3, 246, 1, 1, -1, -1, -1, -1, -1, -1, 0, 0)
    raw_value = datafile['packets'][0]
    raw_values = datafile['packets'][-100:]  # last 100 packets in file

    packet_repr = raw_values[0:1]
    packet_repr['chip_key']  # chip key for packet, e.g. b'1-1-246'
    packet_repr['adc_counts']  # list of ADC values for each packet
    packet_repr.dtype  # description of data type (with names of each column)

    datafile.close()