Beispiel #1
0
def process_flashcam(t0_file, t1_file, run, n_max, decoders, config, verbose):
    """
    Decode FlashCam data, using the fcutils package to handle file access,
    and the FlashCam DataTaker to save the results and write to output.

    Parameters
    ----------
    t0_file : str
        path to the input DAQ (fcio) file
    t1_file : str
        path to the output file
    run : int
        run number (unused here; kept for interface compatibility)
    n_max : int
        maximum number of packets to process
    decoders, config :
        unused here; kept for interface compatibility
    verbose : bool
        if True, print periodic progress updates
    """
    import os
    import fcutils

    fcio = fcutils.fcio(t0_file)
    decoder = FlashCam()
    decoder.get_file_config(fcio)

    # input file size in bytes, needed for the progress fraction below
    # (was previously referenced but never defined -- NameError when verbose)
    file_size = os.path.getsize(t0_file)

    # flush the decoder's buffers to the output file every ROW_LIMIT packets
    ROW_LIMIT = 1000

    # loop over raw data packets
    packet_id = 0
    while fcio.next_event() and packet_id < n_max:
        packet_id += 1
        if verbose and packet_id % 1000 == 0:
            # NOTE(review): fcio.telid presumably tracks the current byte
            # offset in the file -- confirm against fcutils documentation
            update_progress(float(fcio.telid) / file_size)

        # write periodically to the output file
        if packet_id % ROW_LIMIT == 0:
            decoder.save_to_lh5(t1_file)

        decoder.decode_event(fcio, packet_id)

    # end of loop: flush any remaining buffered events
    # (previously commented out, which dropped the tail of the data)
    decoder.save_to_lh5(t1_file)

    if verbose:
        update_progress(1)
Beispiel #2
0
def process_flashcam(daq_file,
                     raw_files,
                     n_max,
                     ch_groups_dict=None,
                     verbose=False,
                     buffer_size=8192,
                     chans=None,
                     f_out=''):
    """
    decode FlashCam data, using the fcutils package to handle file access,
    and the FlashCam DataTaker to save the results and write to output.

    `raw_files` can be a string, or a dict with a label for each file:
      `{'geds':'filename_geds.lh5', 'muvt':'filename_muvt.lh5'}`

    Parameters
    ----------
    daq_file : str
        path to the input fcio file
    raw_files : str or dict
        single output filename, or a {label: filename} mapping
    n_max : int or float
        maximum number of data packets to decode (np.inf for no limit)
    ch_groups_dict : dict, optional
        channel-grouping config; falls back to single-table mode if absent
    verbose : bool
        print progress information
    buffer_size : int
        rows buffered per output table before flushing to disk
    chans :
        unused here; kept for interface compatibility
    f_out : str
        fallback output filename (overwritten when raw_files determines it)

    Returns
    -------
    bytes_processed : int
        number of payload bytes decoded from the input file
    """
    import fcutils

    # decide whether all output goes to a single file
    if isinstance(raw_files, str):
        single_output = True
        f_out = raw_files
    elif len(raw_files) == 1:
        single_output = True
        f_out = list(raw_files.values())[0]
    else:
        single_output = False

    fcio = fcutils.fcio(daq_file)

    # set up event decoder
    event_decoder = FlashCamEventDecoder()
    event_decoder.set_file_config(fcio)
    event_tables = {}

    # build ch_groups and set up tables
    ch_groups = None
    if (ch_groups_dict is not None) and ('FlashCamEventDecoder'
                                         in ch_groups_dict):
        # get ch_groups
        ch_groups = ch_groups_dict['FlashCamEventDecoder']
        expand_ch_groups(ch_groups)
    else:
        print('Config not found.  Single-table mode')
        ch_groups = create_dummy_ch_group()

    # set up ch_group-to-output-file-and-group info
    if single_output:
        set_outputs(ch_groups,
                    out_file_template=f_out,
                    grp_path_template='{group_name}/raw')
    else:
        set_outputs(ch_groups,
                    out_file_template=raw_files,
                    grp_path_template='{group_name}/raw')

    # set up tables
    event_tables = build_tables(ch_groups, buffer_size, event_decoder)

    if verbose:
        print('Output group : output file')
        for group_info in ch_groups.values():
            group_path = group_info['group_path']
            out_file = group_info['out_file']
            print(group_path, ':', out_file.split('/')[-1])

    # dictionary with the unique file names as keys; the value records
    # whether any data were actually written to that file
    file_info = dict.fromkeys(
        set(group_info['out_file'] for group_info in ch_groups.values()),
        False)

    # set up status decoder (this is 'auxs' output)
    status_decoder = FlashCamStatusDecoder()
    status_decoder.set_file_config(fcio)
    status_tbl = lh5.Table(buffer_size)
    status_decoder.initialize_lh5_table(status_tbl)
    try:
        status_filename = f_out if single_output else raw_files['auxs']
        config_filename = f_out if single_output else raw_files['auxs']
    except (KeyError, TypeError):
        # no 'auxs' entry (or raw_files is not a dict): use default names
        status_filename = "fcio_status"
        config_filename = "fcio_config"

    # Set up the store
    # TODO: add overwrite capability
    lh5_store = lh5.Store()

    # write fcio_config
    fcio_config = event_decoder.get_file_config_struct()
    lh5_store.write_object(fcio_config, 'fcio_config', config_filename)

    # loop over raw data packets
    packet_id = 0
    rc = 1
    bytes_processed = 0
    bytes_per_loop = 0
    file_size = os.path.getsize(daq_file)
    max_numtraces = 0

    # progress is measured in packets ("id") when n_max is finite,
    # otherwise in bytes ("B") relative to the input file size
    unit = "B"
    n_entries = 0
    if n_max < np.inf and n_max > 0:
        n_entries = n_max
        unit = "id"
    else:
        n_entries = file_size
    progress_bar = tqdm_range(0,
                              int(n_entries),
                              text="Processing",
                              verbose=verbose,
                              unit=unit)
    while rc and packet_id < n_max:
        rc = fcio.get_record()

        # Skip non-interesting records
        # FIXME: push to a buffer of skipped packets?
        if rc in (0, 1, 2, 5): continue

        packet_id += 1

        # Status record
        if rc == 4:
            bytes_per_loop = status_decoder.decode_packet(
                fcio, status_tbl, packet_id)
            bytes_processed += bytes_per_loop
            if status_tbl.is_full():
                lh5_store.write_object(status_tbl,
                                       'fcio_status',
                                       status_filename,
                                       n_rows=status_tbl.size)
                status_tbl.clear()

        # Event or SparseEvent record
        if rc == 3 or rc == 6:
            for group_info in ch_groups.values():
                tbl = group_info['table']
                # Check that the tables are large enough
                # TODO: don't need to check this every event, only if sum(numtraces) >= buffer_size
                if tbl.size < fcio.numtraces and fcio.numtraces > max_numtraces:
                    print('warning: tbl.size =', tbl.size,
                          'but fcio.numtraces =', fcio.numtraces)
                    print('may overflow. suggest increasing tbl.size')
                    max_numtraces = fcio.numtraces
                # Pre-emptively flush tables if this event might overflow them
                if tbl.size - tbl.loc < fcio.numtraces:  # might overflow
                    group_path = group_info['group_path']
                    out_file = group_info['out_file']
                    lh5_store.write_object(tbl,
                                           group_path,
                                           out_file,
                                           n_rows=tbl.loc)
                    if out_file in file_info: file_info[out_file] = True
                    tbl.clear()

            # Looks okay: just decode
            bytes_per_loop = event_decoder.decode_packet(
                fcio, event_tables, packet_id)
            bytes_processed += bytes_per_loop

            if verbose:
                # advance by one packet (unit "id") or by bytes (unit "B")
                if n_max < np.inf and n_max > 0:
                    update_len = 1
                else:
                    update_len = bytes_per_loop
                update_progress(progress_bar, update_len)

    # end of loop: flush any partially-filled tables to file once more
    for group_info in ch_groups.values():
        tbl = group_info['table']
        if tbl.loc != 0:
            group_path = group_info['group_path']
            out_file = group_info['out_file']
            lh5_store.write_object(tbl, group_path, out_file, n_rows=tbl.loc)
            if out_file in file_info: file_info[out_file] = True
            tbl.clear()
    if status_tbl.loc != 0:
        # use the same group name as the in-loop flushes so all status
        # records land in one 'fcio_status' table (was 'stat', which split
        # the status data across two tables)
        lh5_store.write_object(status_tbl,
                               'fcio_status',
                               status_filename,
                               n_rows=status_tbl.loc)
        status_tbl.clear()

    # alert user to any files not actually saved in the end
    for out_file, is_saved in file_info.items():
        if not is_saved:
            print('Not saving file since no data were found:', out_file)

    if verbose:
        print(packet_id, 'packets decoded')

    if len(event_decoder.skipped_channels) > 0:
        print("Warning - daq_to_raw skipped some channels in file")
        if verbose:
            for ch, n in event_decoder.skipped_channels.items():
                print("  ch", ch, ":", n, "hits")

    return bytes_processed
Beispiel #3
0
from matplotlib import pyplot
import numpy as np
from fcutils import fcio

# The fcio class is used to open the datafile
io = fcio("./th228.fcio")

# print basic file-level configuration read from the fcio header
print("Number of adcs", io.nadcs)
print("Number of samples", io.nsamples)

# Some test parameters
energy_thrs = 0       # energy threshold (0 = accept everything)
max_event = 5         # number of events to inspect before stopping
status_only = 0       # if nonzero, only status records are of interest
no_status = 0         # if nonzero, skip printing status records

# NOTE(review): io.traces presumably exposes the waveform buffer that is
# refilled on each get_record() call -- confirm against fcutils docs
data = io.traces
rec = 1               # nonzero record code keeps the read loop going

while (rec):
    rec = io.get_record()
    if rec == 0 or rec == 1 or rec == 2 or rec == 5: continue
    if no_status == 0 and rec == 4:
        print("Status:",
              io.status,
              io.statustime[0] + io.statustime[1] / 1e6,
              end=' sec ')
        print(io.statustime[2] + io.statustime[3] / 1e6, end=' sec ')
        print(io.cards, end=' cards ')
        print(io.size, end=' ')
        for it in range(0, len(io.environment)):