def test_to_file_v1_0_many_packets(tmpfile, data_packet, config_read_packet,
        timestamp_packet, message_packet):
    to_file(tmpfile, [data_packet, config_read_packet,
        timestamp_packet, message_packet], version='1.0')
    f = h5py.File(tmpfile, 'r')
    assert len(f['packets']) == 4
    assert len(f['messages']) == 1
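For reference, a minimal round-trip sketch using from_file (the import path and the filename are assumptions based on the larpix-control layout):

# Minimal read-back sketch; module path and filename are assumptions.
from larpix.format.hdf5format import from_file

rd = from_file('datalog.h5')              # hypothetical file written by to_file
print(len(rd['packets']), 'packets read back')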
Example #2
    def enable(self):
        '''
        Enable the logger and set up output file.

        If the file already exists then data will be appended to the end of
        the existing arrays.
        '''
        super(HDF5Logger, self).enable()
        to_file(self.filename, [], version=self.version)
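A hypothetical usage sketch, assuming the constructor accepts filename and version keyword arguments (check the actual HDF5Logger signature in your larpix-control version):

# Hypothetical usage; the constructor arguments are assumptions.
logger = HDF5Logger(filename='run0.h5', version='2.0')
logger.enable()      # initializes run0.h5 via to_file(filename, [], version=...)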
Example #3
    def record_configs(self, chips):
        '''
        Write the specified chip configurations to the log file

        .. note:: this method will also flush any data in the buffer to the log file

        :param chips: list of chips whose configurations will be recorded

        '''
        self.flush(block=True)
        to_file(self.filename, chip_list=chips, version=self.version)
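A short hypothetical usage sketch (chip_a and chip_b stand for larpix Chip objects and are assumptions):

# Hypothetical usage; chip_a and chip_b are assumed larpix Chip objects.
logger.record_configs([chip_a, chip_b])   # flushes the buffer, then logs configs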
Example #4
    def _writer(self):
        try:
            while True:
                packets = self._worker_queue.get(timeout=1)
                to_file(self.filename, packets, version=self.version)
                self._worker_queue.task_done()
        except Empty:
            pass
        except:
            print('HDF5Logger IO thread error!')
            raise
        finally:
            self._worker = None
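A self-contained sketch of the producer/consumer wiring this method implies; the queue, thread, and file names are assumptions, not the actual class internals:

# Sketch of the wiring around _writer; names and import path are assumptions.
import threading
from queue import Queue, Empty

from larpix.format.hdf5format import to_file   # assumed import path

worker_queue = Queue()

def writer(filename, version='2.0'):
    try:
        while True:
            packets = worker_queue.get(timeout=1)
            to_file(filename, packets, version=version)
            worker_queue.task_done()
    except Empty:
        pass   # queue idle for >1 s: let the IO thread exit

io_thread = threading.Thread(target=writer, args=('run0.h5',), daemon=True)
io_thread.start()
worker_queue.put([])       # enqueue a (possibly empty) list of packets
worker_queue.join()        # block until the IO thread has written everything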
def test_to_file_v2_4_chips(tmpfile, chip):
    chips = [copy.deepcopy(chip) for i in range(10)]
    for i, chip in enumerate(chips):
        chip.chip_id = i
    chips[0].config.pixel_trim_dac[12] = 0
    chips[1].config.threshold_global = 1
    to_file(tmpfile, chip_list=chips, version='2.4')
    new_chips = from_file(tmpfile, load_configs=True)['configs']
    assert [chip.chip_key
            for chip in chips] == [chip.chip_key for chip in new_chips]
    assert [chip.config
            for chip in chips] == [chip.config for chip in new_chips]
    assert new_chips[0].config.pixel_trim_dac[12] == chips[
        0].config.pixel_trim_dac[12]
    assert new_chips[1].config.threshold_global == chips[
        1].config.threshold_global

    new_chips = from_file(tmpfile, load_configs=slice(1, 4))['configs']
    assert len(new_chips) == 3
    assert new_chips[0].chip_key == chips[1].chip_key
def main(input_filename, output_filename, block_size):
    total_messages = len_rawfile(input_filename)
    total_blocks = total_messages // block_size + 1
    last = time.time()
    for i_block in range(total_blocks):
        start = i_block * block_size
        end = min(start + block_size, total_messages)
        if start == end: return

        if time.time() > last + 1:
            print('reading block {} of {}...\r'.format(i_block + 1,
                                                       total_blocks),
                  end='')
            last = time.time()
        rd = from_rawfile(input_filename, start=start, end=end)
        pkts = list()
        for i_msg, data in enumerate(
                zip(rd['msg_headers']['io_groups'], rd['msgs'])):
            io_group, msg = data
            pkts.extend(parse(msg, io_group=io_group))
        to_file(output_filename, packet_list=pkts)
    print()
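A hypothetical command-line entry point for main() above; argument names and the default block size are assumptions:

# Hypothetical CLI wrapper; argument names and defaults are assumptions.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Convert a LArPix raw file into packet-formatted HDF5')
    parser.add_argument('input_filename')
    parser.add_argument('output_filename')
    parser.add_argument('--block_size', type=int, default=10240)
    args = parser.parse_args()
    main(args.input_filename, args.output_filename, args.block_size)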
Example #7
File: fee.py, Project: DUNE/larnd-sim
def export_to_hdf5(adc_list, adc_ticks_list, unique_pix, track_ids, filename):
    """
    Saves the ADC counts in the LArPix HDF5 format.

    Args:
        adc_list (:obj:`numpy.ndarray`): list of ADC values for each pixel
        adc_ticks_list (:obj:`numpy.ndarray`): list of time ticks for each pixel
        unique_pix (:obj:`numpy.ndarray`): list of pixel IDs
        track_ids (:obj:`numpy.ndarray`): list of track IDs associated with
            each pixel
        filename (str): filename of HDF5 output file

    Returns:
        tuple: the list of LArPix packets and the array of entries for the
        `mc_packets_assn` dataset
    """

    dtype = np.dtype([('track_ids','(5,)i8')])
    packets = [TimestampPacket()]
    packets_mc = [[-1]*5]
    packets_mc_ds = []
    last_event = -1
    
    for itick, adcs in enumerate(tqdm(adc_list, desc="Writing to HDF5...")):
        ts = adc_ticks_list[itick]
        pixel_id = unique_pix[itick]

        plane_id = int(pixel_id[0] // consts.n_pixels[0])
        tile_x = int((pixel_id[0] - consts.n_pixels[0] * plane_id) // consts.n_pixels_per_tile[1])
        tile_y = int(pixel_id[1] // consts.n_pixels_per_tile[1])
        tile_id = consts.tile_map[plane_id][tile_x][tile_y]

        for iadc, adc in enumerate(adcs):
            t = ts[iadc]

            if adc > digitize(0):
                event = t // (consts.time_interval[1]*3)
                time_tick = int(np.floor(t/CLOCK_CYCLE))

                if event != last_event:
                    packets.append(TriggerPacket(io_group=1,trigger_type=b'\x02',timestamp=int(event*consts.time_interval[1]/consts.t_sampling*3)))
                    packets_mc.append([-1]*5)
                    packets.append(TriggerPacket(io_group=2,trigger_type=b'\x02',timestamp=int(event*consts.time_interval[1]/consts.t_sampling*3)))
                    packets_mc.append([-1]*5)
                    last_event = event
                
                p = Packet_v2()

                try:
                    chip, channel = consts.pixel_connection_dict[rotate_tile(pixel_id%70, tile_id)]
                except KeyError:
                    print("Pixel ID not valid", pixel_id)
                    continue
                
                # disabled channels near the borders of the tiles
                if chip in top_row_chip_ids and channel in top_row_channels: continue
                if chip in bottom_row_chip_ids and channel in bottom_row_channels: continue
                if chip in inner_edge_chip_ids and channel in inner_edge_channels: continue 
                    
                p.dataword = int(adc)
                p.timestamp = time_tick

                try:
                    io_group_io_channel = consts.tile_chip_to_io[tile_id][chip]
                except KeyError:
#                     print("Chip %i on tile %i not found" % (chip, tile_id))
                    continue
                    
                io_group, io_channel = io_group_io_channel // 1000, io_group_io_channel % 1000
                p.chip_key = "%i-%i-%i" % (io_group, io_channel, chip)
                p.channel_id = channel
                p.packet_type = 0
                p.first_packet = 1
                p.assign_parity()

                packets_mc.append(track_ids[itick][iadc])
                packets.append(p)
            else:
                break
        
    packet_list = PacketCollection(packets, read_id=0, message='')
    
    hdf5format.to_file(filename, packet_list)

    if packets:
        packets_mc_ds = np.empty(len(packets), dtype=dtype)
        packets_mc_ds['track_ids'] = packets_mc

    with h5py.File(filename, 'a') as f:
        if "mc_packets_assn" in f.keys():
            del f['mc_packets_assn']
        f.create_dataset("mc_packets_assn", data=packets_mc_ds)

    return packets, packets_mc_ds
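A minimal sketch of reading back the packets and the association dataset written above (the filename is hypothetical):

# Hypothetical read-back; 'packets.h5' is an assumed filename.
import h5py

with h5py.File('packets.h5', 'r') as f:
    n_packets = len(f['packets'])                     # hdf5format packet dataset
    track_assn = f['mc_packets_assn']['track_ids'][:]
    print(n_packets, 'packets,', track_assn.shape[0], 'track-ID associations')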
Example #8
    def flush(self):
        '''
        Flushes any held data to the output file
        '''
        to_file(self.filename, self._buffer['packets'], version=self.version)
        self._buffer['packets'] = []
Example #9
File: fee.py, Project: maxpkatz/larnd-sim
def export_to_hdf5(adc_list, adc_ticks_list, unique_pix, track_ids, filename):
    """
    Saves the ADC counts in the LArPix HDF5 format.

    Args:
        adc_list (:obj:`numpy.ndarray`): list of ADC values for each pixel
        adc_ticks_list (:obj:`numpy.ndarray`): list of time ticks for each pixel
        unique_pix (:obj:`numpy.ndarray`): list of pixel IDs
        track_ids (:obj:`numpy.ndarray`): list of track IDs associated with
            each pixel
        filename (str): filename of HDF5 output file

    Returns:
        tuple: dictionaries, keyed by TPC index, of the LArPix packets and of
        the associated track IDs
    """

    dtype = np.dtype([('track_ids','(5,)i8')])
    packets = {}
    packets_mc = {}
    packets_mc_ds = {}
    
    for ic in range(consts.tpc_centers.shape[0]):
        packets[ic] = []
        packets_mc[ic] = []
        packets_mc_ds[ic] = []
        
    for itick, adcs in enumerate(tqdm(adc_list, desc="Writing to HDF5...")):
        ts = adc_ticks_list[itick]
        pixel_id = unique_pix[itick]
        plane_id = pixel_id[0] // consts.n_pixels[0]
        pix_x, pix_y = detsim.get_pixel_coordinates(pixel_id)
        
        try:
            pix_x -= consts.tpc_centers[int(plane_id)][0]
            pix_y -= consts.tpc_centers[int(plane_id)][1]
        except IndexError:
            print("Pixel (%i, %i) outside the TPC borders" % (pixel_id[0], pixel_id[1]))

        pix_x *= consts.cm2mm
        pix_y *= consts.cm2mm

        for iadc, adc in enumerate(adcs):
            t = ts[iadc]

            if adc > digitize(0):
                p = Packet_v2()

                try:
                    channel, chip = consts.pixel_connection_dict[(round(pix_x/consts.pixel_size[0]),round(pix_y/consts.pixel_size[1]))]
                except KeyError:
                    print("Pixel coordinates not valid", pix_x, pix_y, pixel_id, adc)
                    continue

                p.dataword = int(adc)
                p.timestamp = int(np.floor(t/CLOCK_CYCLE))
            
                if isinstance(chip, int):
                    p.chip_id = chip
                else:
                    p.chip_key = chip

                p.channel_id = channel
                p.packet_type = 0
                p.first_packet = 1
                p.assign_parity()
                        
                if not packets[plane_id]:
                    packets[plane_id].append(TimestampPacket())
                    packets_mc[plane_id].append([-1]*5)
                    
                packets_mc[plane_id].append(track_ids[itick][iadc])
                packets[plane_id].append(p)
            else:
                break

    for ipc in packets:
        packet_list = PacketCollection(packets[ipc], read_id=0, message='')

        if len(packets.keys()) > 1:
            if "." in filename:
                pre_extension, post_extension = filename.rsplit('.', 1)
                filename_ext = "%s-%i.%s" % (pre_extension, ipc, post_extension)
            else:
                filename_ext = "%s-%i" % (filename, ipc)
        else:
            filename_ext = filename

        hdf5format.to_file(filename_ext, packet_list)
        if len(packets[ipc]):
            packets_mc_ds[ipc] = np.empty(len(packets[ipc]), dtype=dtype)
            packets_mc_ds[ipc]['track_ids'] = packets_mc[ipc]

        with h5py.File(filename_ext, 'a') as f:
            if "mc_packets_assn" in f.keys():
                del f['mc_packets_assn']
            f.create_dataset("mc_packets_assn", data=packets_mc_ds[ipc])

    return packets, packets_mc
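A sketch of iterating over the per-TPC output files produced above; the base filename and the TPC count are assumptions, and the '-<index>' suffix follows the rsplit('.', 1) convention in the code:

# Hypothetical read-back loop over per-TPC files ('packets.h5' base name assumed).
import h5py

n_tpcs = 2                        # assumed number of TPC volumes
for ipc in range(n_tpcs):
    fname = 'packets-%i.h5' % ipc
    with h5py.File(fname, 'r') as f:
        print(fname, len(f['packets']), 'packets,',
              len(f['mc_packets_assn']), 'associations')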
Example #10
def export_to_hdf5(event_id_list,
                   adc_list,
                   adc_ticks_list,
                   unique_pix,
                   current_fractions,
                   track_ids,
                   filename,
                   t0=0,
                   bad_channels=None):
    """
    Saves the ADC counts in the LArPix HDF5 format.
    Args:
        event_id_list (:obj:`numpy.ndarray`): list of event ids for each ADC value for each pixel
        adc_list (:obj:`numpy.ndarray`): list of ADC values for each pixel
        adc_ticks_list (:obj:`numpy.ndarray`): list of time ticks for each pixel
        unique_pix (:obj:`numpy.ndarray`): list of pixel IDs
        current_fractions (:obj:`numpy.ndarray`): array containing the fractional current
            induced by each track on each pixel
        track_ids (:obj:`numpy.ndarray`): 2D array containing the track IDs associated
            to each pixel
        filename (str): filename of HDF5 output file
        t0 (int): timestamp offset added to the generated event start times
            (optional, default is 0)
        bad_channels (str): path to a YAML file mapping each chip key to the
            list of its bad channels (optional)
    Returns:
        tuple: the list of LArPix packets, the array of entries for the
        `mc_packets_assn` dataset, and the start time of the last event
    """
    dtype = np.dtype([('track_ids', '(%i,)i8' % track_ids.shape[1]),
                      ('fraction', '(%i,)f8' % current_fractions.shape[2])])

    io_groups = np.unique([
        v // 1000 for d in detector.TILE_CHIP_TO_IO.values()
        for v in d.values()
    ])
    packets = []
    packets_mc = []
    packets_frac = []

    if t0 == 0:
        packets.append(TimestampPacket())
        packets_mc.append([-1] * track_ids.shape[1])
        packets_frac.append([0] * current_fractions.shape[2])
        for io_group in io_groups:
            packets.append(
                SyncPacket(sync_type=b'S', timestamp=0, io_group=io_group))
            packets_mc.append([-1] * track_ids.shape[1])
            packets_frac.append([0] * current_fractions.shape[2])

    packets_mc_ds = []
    last_event = -1

    if bad_channels:
        with open(bad_channels, 'r') as bad_channels_file:
            bad_channels_list = yaml.load(bad_channels_file,
                                          Loader=yaml.FullLoader)

    unique_events, unique_events_inv = np.unique(event_id_list[..., 0],
                                                 return_inverse=True)
    event_start_time = np.random.exponential(
        scale=EVENT_RATE, size=unique_events.shape).astype(int)
    event_start_time = np.cumsum(event_start_time)
    event_start_time += t0
    event_start_time_list = event_start_time[unique_events_inv]

    rollover_count = 0
    for itick, adcs in enumerate(adc_list):
        ts = adc_ticks_list[itick]
        pixel_id = unique_pix[itick]

        pix_x, pix_y, plane_id = id2pixel(pixel_id)
        module_id = plane_id // 2 + 1

        if module_id not in detector.MODULE_TO_IO_GROUPS.keys():
            logger.warning("Pixel ID not valid %i" % module_id)
            continue

        tile_x = int(pix_x // detector.N_PIXELS_PER_TILE[0])
        tile_y = int(pix_y // detector.N_PIXELS_PER_TILE[1])
        anode_id = 0 if plane_id % 2 == 0 else 1
        tile_id = detector.TILE_MAP[anode_id][tile_x][tile_y]

        for iadc, adc in enumerate(adcs):
            t = ts[iadc]

            if adc > digitize(0):
                while True:
                    event = event_id_list[itick, iadc]
                    event_t0 = event_start_time_list[itick]
                    time_tick = int(np.floor(t / CLOCK_CYCLE + event_t0))
                    if event_t0 > 2**31 - 1 or time_tick > 2**31 - 1:
                        # 31-bit rollover
                        rollover_count += 1
                        packets.append(
                            TimestampPacket(
                                timestamp=floor(rollover_count * (2**31) *
                                                CLOCK_CYCLE * 1e-6)))
                        packets_mc.append([-1] * track_ids.shape[1])
                        packets_frac.append([0] * current_fractions.shape[2])
                        for io_group in io_groups:
                            packets.append(
                                SyncPacket(sync_type=b'S',
                                           timestamp=(2**31),
                                           io_group=io_group))
                            packets_mc.append([-1] * track_ids.shape[1])
                            packets_frac.append([0] *
                                                current_fractions.shape[2])
                        event_start_time_list[itick:] -= 2**31
                    else:
                        break

                event_t0 = event_t0 % (2**31)
                time_tick = time_tick % (2**31)

                if event != last_event:
                    for io_group in io_groups:
                        packets.append(
                            TriggerPacket(io_group=io_group,
                                          trigger_type=b'\x02',
                                          timestamp=event_t0))
                        packets_mc.append([-1] * track_ids.shape[1])
                        packets_frac.append([0] * current_fractions.shape[2])
                    last_event = event

                p = Packet_v2()

                try:
                    chip, channel = detector.PIXEL_CONNECTION_DICT[rotate_tile(
                        (pix_x % detector.N_PIXELS_PER_TILE[0],
                         pix_y % detector.N_PIXELS_PER_TILE[1]), tile_id)]
                except KeyError:
                    logger.warning("Pixel ID not valid %i" % pixel_id)
                    continue

                p.dataword = int(adc)
                p.timestamp = time_tick

                try:
                    io_group_io_channel = detector.TILE_CHIP_TO_IO[tile_id][
                        chip]
                except KeyError:
                    logger.info(f"Chip {chip} on tile {tile_id} not found")
                    continue

                io_group, io_channel = io_group_io_channel // 1000, io_group_io_channel % 1000
                io_group = detector.MODULE_TO_IO_GROUPS[module_id][io_group -
                                                                   1]
                chip_key = "%i-%i-%i" % (io_group, io_channel, chip)

                if bad_channels:
                    if chip_key in bad_channels_list:
                        if channel in bad_channels_list[chip_key]:
                            logger.info(
                                f"Channel {channel} on chip {chip_key} disabled"
                            )
                            continue

                p.chip_key = chip_key
                p.channel_id = channel
                p.receipt_timestamp = time_tick
                p.packet_type = 0
                p.first_packet = 1
                p.assign_parity()

                packets_mc.append(track_ids[itick])
                packets_frac.append(current_fractions[itick][iadc])
                packets.append(p)
            else:
                break

    if packets:
        packet_list = PacketCollection(packets, read_id=0, message='')
        hdf5format.to_file(filename, packet_list)
        packets_mc_ds = np.empty(len(packets), dtype=dtype)
        packets_mc_ds['track_ids'] = packets_mc
        packets_mc_ds['fraction'] = packets_frac

        with h5py.File(filename, 'a') as f:
            if t0 == 0:
                f.create_dataset("mc_packets_assn",
                                 data=packets_mc_ds,
                                 maxshape=(None, ))
            else:
                f['mc_packets_assn'].resize(
                    (f['mc_packets_assn'].shape[0] + packets_mc_ds.shape[0]),
                    axis=0)
                f['mc_packets_assn'][-packets_mc_ds.shape[0]:] = packets_mc_ds

            f['configs'].attrs['vdrift'] = detector.V_DRIFT
            f['configs'].attrs['long_diff'] = detector.LONG_DIFF
            f['configs'].attrs['tran_diff'] = detector.TRAN_DIFF
            f['configs'].attrs['lifetime'] = detector.ELECTRON_LIFETIME
            f['configs'].attrs['drift_length'] = detector.DRIFT_LENGTH

    return packets, packets_mc_ds, event_start_time_list[-1]
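A sketch of inspecting the detector attributes and associations written above (the filename is hypothetical; attribute names follow the code):

# Hypothetical inspection of a file produced by export_to_hdf5 above.
import h5py

with h5py.File('packets.h5', 'r') as f:
    cfg = f['configs'].attrs
    for key in ('vdrift', 'long_diff', 'tran_diff', 'lifetime', 'drift_length'):
        print(key, cfg[key])
    assn = f['mc_packets_assn']
    print(assn.shape[0], 'entries with fields', assn.dtype.names)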