Пример #1
0
def hist_writer(file,
                *,
                group_name: 'options: HIST, HIST2D',
                table_name: 'options: pmt, pmtMAU, sipm, sipmMAU',
                compression='ZLIB4',
                n_sensors: 'number of pmts or sipms',
                bin_centres: 'np.array of bin centres'):
    """Return a closure that appends per-sensor histograms to an EArray.

    The bin centres are stored once, at writer-creation time; each call
    to the returned closure appends one (n_sensors, n_bins) histogram.
    """
    # Fetch the target group, creating it on first use (EAFP).
    try:
        group = getattr(file.root, group_name)
    except tb.NoSuchNodeError:
        group = file.create_group(file.root, group_name)

    n_bins = len(bin_centres)

    # Extendable array: the first axis grows by one entry per write.
    histograms = file.create_earray(group,
                                    table_name,
                                    atom=tb.Int32Atom(),
                                    shape=(0, n_sensors, n_bins),
                                    filters=tbl.filters(compression))

    # Bin centres are fixed for the writer's lifetime: write them once here.
    file.create_array(group, table_name + '_bins', bin_centres)

    def write_hist(histo: 'np.array of histograms, one for each sensor'):
        histograms.append(histo.reshape(1, n_sensors, n_bins))

    return write_hist
Пример #2
0
    def write_hist(group_name: 'string with folder name to save histograms',
                   table_name: 'histogram name',
                   entries: 'np.array with bin content',
                   bins: 'list of np.array of bins',
                   out_of_range: 'np.array length=2 with events out of range',
                   errors: 'np.array with bins uncertainties',
                   labels: 'list with labels of the histogram',
                   scales: 'list with the scales of the histogram'):
        """Write one histogram with all its metadata to the open file.

        Stores the bin edges (variable-length, one array per axis), the
        bin contents, out-of-range counts, per-bin errors, axis labels
        and axis scales under ``group_name``.

        Raises
        ------
        ValueError : if ``table_name`` already exists in the group.
        """
        # Fetch the target group, creating it on first use (EAFP).
        try:
            hist_group = getattr(file.root, group_name)
        except tb.NoSuchNodeError:
            hist_group = file.create_group(file.root, group_name)

        if table_name in hist_group:
            raise ValueError(f"Histogram {table_name} already exists")

        # Axes may have different numbers of bins, hence a VLArray.
        vlarray = file.create_vlarray(hist_group,
                                      table_name + '_bins',
                                      atom=tb.Float64Atom(shape=()),
                                      filters=tbl.filters(compression))
        for ibin in bins:
            vlarray.append(ibin)
        add_carray(hist_group, table_name, entries)
        add_carray(hist_group, table_name + '_outRange', out_of_range)
        add_carray(hist_group, table_name + '_errors', errors)
        file.create_array(hist_group, table_name + '_labels', labels)
        file.create_array(hist_group, table_name + '_scales', scales)
Пример #3
0
def detsim(files_in, file_out, compression, event_range, detector_db,
           run_number, krmap_filename, psfsipm_filename):
    """Run the detector-simulation pipeline: hits -> raw waveforms -> file.

    Loads hits from ``files_in``, generates PMT and SiPM waveforms with
    the configured kr-map / PSF, and writes the waveforms and MC info
    to ``file_out``.  Returns the result of ``push`` over the pipeline.

    NOTE(review): ``event_range`` and ``detector_db`` are accepted but
    unused in this body — confirm whether they are consumed elsewhere.
    """
    npmt, nsipm = dsim.npmts, dsim.nsipms
    pmt_wid, sipm_wid = dsim.wf_pmt_bin_time, dsim.wf_sipm_bin_time
    # Number of samples in each sensor's waveform buffer.
    nsamp_pmt = int(dsim.wf_buffer_time // pmt_wid)
    nsamp_sipm = int(dsim.wf_buffer_time // sipm_wid)

    # Bug-prone original wrote args=("hits"): parentheses without a comma
    # are NOT a tuple — it was just the string "hits".  Written plainly
    # here so the single-argument intent is explicit (runtime-identical).
    generate_wfs_ = fl.map(get_function_generate_wfs(
        krmap_filename=krmap_filename, psfsipm_filename=psfsipm_filename),
                           args="hits",
                           out="wfs")

    with tb.open_file(file_out, "w",
                      filters=tbl.filters(compression)) as h5out:

        write_mc = fl.sink(mc_info_writer(h5out), args=("mc", "evt"))
        wf_writer_ = fl.sink(wf_writer(h5out,
                                       n_sens_eng=npmt,
                                       n_sens_trk=nsipm,
                                       length_eng=nsamp_pmt,
                                       length_trk=nsamp_sipm),
                             args=("evt", "wfs"))

        save_run_info(h5out, run_number)
        # fork duplicates each event to both sinks.
        return push(source=load_hits(files_in),
                    pipe=pipe(generate_wfs_, fork(wf_writer_, write_mc)))
Пример #4
0
 def add_carray(hist_group, table_name, var):
     """Persist ``var`` as a compressed CArray called ``table_name``."""
     # Atom and shape are derived directly from the numpy array.
     entry = file.create_carray(hist_group,
                                table_name,
                                atom=tb.Atom.from_dtype(var.dtype),
                                shape=var.shape,
                                filters=tbl.filters(compression))
     entry[:] = var
Пример #5
0
def wf_writer(h5out, *,
                  group_name  : str = 'detsim',
                  compression : str =  'ZLIB4',
                  n_sens_eng  : int =       12,
                  n_sens_trk  : int =     1792,
                  length_eng  : int           ,
                  length_trk  : int           ) -> Callable[[int, List, List, List], None]:
    """
    Generalised buffer writer which defines a raw waveform writer
    for each type of sensor as well as an event info writer
    with written event, timestamp and a mapping to the
    nexus event number in case of event splitting.

    Parameters
    ----------
    h5out       : open PyTables file receiving all arrays/tables.
    group_name  : group under which the waveform arrays are created.
    compression : compression label passed to tbl.filters.
    n_sens_eng  : number of energy-plane sensors (PMTs).
    n_sens_trk  : number of tracking-plane sensors (SiPMs).
    length_eng  : samples per energy-plane waveform.
    length_trk  : samples per tracking-plane waveform.

    Returns
    -------
    write_waveforms : closure writing one event's waveforms per call.
    """

    # Raw-waveform writer for the energy plane ('pmtrd').
    eng_writer = rwf_writer(h5out,
                            group_name      =  group_name,
                            compression     = compression,
                            table_name      =     'pmtrd',
                            n_sensors       =  n_sens_eng,
                            waveform_length =  length_eng)

    # Raw-waveform writer for the tracking plane ('sipmrd').
    trk_writer = rwf_writer(h5out,
                            group_name      =  group_name,
                            compression     = compression,
                            table_name      =    'sipmrd',
                            n_sensors       =  n_sens_trk,
                            waveform_length =  length_trk)

    # Fetch the 'Run' group, creating it on first use (EAFP).
    try:
        evt_group = getattr(h5out.root, 'Run')
    except tb.NoSuchNodeError:
        evt_group = h5out.create_group(h5out.root, 'Run')

    # Maps each written event index to its originating nexus event.
    nexus_evt_tbl = h5out.create_table(evt_group, "events", EventInfo,
                                       "event, timestamp & nexus evt for each index",
                                       tbl.filters(compression))

    def write_waveforms(nexus_evt      :        int ,
                        wfs            : Tuple[List, List]) -> None:
        # Write one event: record the event mapping, then both planes.
        # NOTE(review): unlike buffer_writer, the 'timestamp' column
        # declared in the table description is never filled here —
        # confirm whether that is intended.
        eng, trk = wfs
        row = nexus_evt_tbl.row
        row["event_number"] = write_waveforms.counter
        row["nexus_evt"]    = nexus_evt
        row.append()

        # Transposed before writing — presumably wfs arrive as
        # (samples, sensors) and rwf_writer expects (sensors, samples);
        # TODO confirm against rwf_writer's expected layout.
        eng_writer(eng.transpose())
        trk_writer(trk.transpose())

        write_waveforms.counter += 1

    # Running event counter kept as a function attribute (closure state).
    write_waveforms.counter = 0
    return write_waveforms
Пример #6
0
def save_histomanager_to_file(histogram_manager,
                              file_out,
                              mode='w',
                              group='HIST'):
    """Saves the HistoManager and its contained histograms to a file.

    Parameters
    ----------
    file_out : str
    Path of the file where the HistoManager will be written.
    mode     : str, optional
    Writing mode. By default a new file will be created.
    group    : string, optional
    Group name to save the histograms in the file.

    Raises
    ------
    ValueError : if mode is not exactly 'w' or 'a'.
    """
    # Bug fix: the original `mode not in 'wa'` was a substring test, so
    # '' and 'wa' slipped through validation.  Test exact membership.
    if mode not in ('w', 'a'):
        raise ValueError(
            f"Incompatible mode ({mode}) of writing, please use 'w' (write) or 'a' (append)."
        )
    with tb.open_file(file_out, mode, filters=tbl.filters('ZLIB4')) as h5out:
        writer = hist_writer_var(h5out)
        for histoname, histo in histogram_manager.histos.items():
            writer(group, histoname, histo.data, histo.bins, histo.out_range,
                   histo.errors, histo.labels, histo.scale)
Пример #7
0
def position_signal(conf):
    """Build and run the trigger/buffer pipeline described by ``conf``.

    Loads sensor waveforms from ``conf.files_in``, bins PMT and SiPM
    waveforms onto a common time range, orders the sensors, finds
    triggers, builds per-trigger buffers, and writes the buffers plus
    MC info to ``conf.file_out``.  Returns the result of ``push``.
    """

    # Pull and coerce configuration values up front.
    files_in      = glob(os.path.expandvars(conf.files_in))
    file_out      =      os.path.expandvars(conf.file_out)
    detector_db   =                         conf.detector_db
    run_number    =                     int(conf.run_number)
    max_time      =                     int(conf.max_time)
    buffer_length =                   float(conf.buffer_length)
    pre_trigger   =                   float(conf.pre_trigger)
    trg_threshold =                   float(conf.trg_threshold)
    compression   =                         conf.compression

    # Sensor counts from the database; bin widths from the first file.
    npmt, nsipm        = get_no_sensors(detector_db, run_number)
    pmt_wid, sipm_wid  = get_sensor_binning(files_in[0])
    # Buffer sizes in samples (buffer_length is in microseconds).
    nsamp_pmt          = int(buffer_length * units.mus /  pmt_wid)
    nsamp_sipm         = int(buffer_length * units.mus / sipm_wid)

    # Pipeline stages: each fl.map reads named args from the event dict
    # and writes named outputs back into it.
    bin_calculation    = calculate_binning(max_time)
    pmt_binning        = fl.map(bin_calculation,
                                args = ("pmt_wfs" ,  "pmt_binwid"),
                                out  = ("pmt_bins", "pmt_bin_wfs"))

    # PMT bin range constrains the SiPM binning below.
    extract_minmax     = fl.map(bin_minmax,
                                args = "pmt_bins",
                                out  = ("min_bin", "max_bin"))

    sipm_binning       = fl.map(bin_calculation,
                                args = ("sipm_wfs", "sipm_binwid",
                                        "min_bin" ,     "max_bin") ,
                                out  = ("sipm_bins", "sipm_bin_wfs"))

    sensor_order_      = fl.map(partial(sensor_order,
                                        detector_db = detector_db,
                                        run_number  =  run_number),
                                args = ("pmt_bin_wfs", "sipm_bin_wfs"),
                                out  = ("pmt_ord", "sipm_ord"))

    # Triggers are found on the (unbinned) PMT waveforms.
    trigger_finder_    = fl.map(trigger_finder(buffer_length,
                                               pmt_wid, trg_threshold),
                                args = "pmt_wfs",
                                out  = "triggers")

    event_times        = fl.map(trigger_times,
                                args = ("triggers", "timestamp", "pmt_bins"),
                                out  = "evt_times")

    calculate_buffers_ = fl.map(calculate_buffers(buffer_length, pre_trigger,
                                                  pmt_wid      ,    sipm_wid),
                                args = ("triggers",
                                        "pmt_bins" ,  "pmt_bin_wfs",
                                        "sipm_bins", "sipm_bin_wfs"),
                                out  = "buffers")

    with tb.open_file(file_out, "w", filters = tbl.filters(compression)) as h5out:

        write_mc         = fl.sink(mc_info_writer(h5out),
                                   args = ("mc", "evt"))
        buffer_writer_   = fl.sink(buffer_writer(h5out                  ,
                                                 n_sens_eng = npmt      ,
                                                 n_sens_trk = nsipm     ,
                                                 length_eng = nsamp_pmt ,
                                                 length_trk = nsamp_sipm),
                                   args = ("evt", "pmt_ord", "sipm_ord",
                                           "evt_times", "buffers"))

        save_run_info(h5out, run_number)
        # fork duplicates each event to both sinks.
        return push(source = load_sensors(files_in, detector_db, run_number),
                    pipe   = pipe(pmt_binning         ,
                                  extract_minmax      ,
                                  sipm_binning        ,
                                  sensor_order_       ,
                                  trigger_finder_     ,
                                  event_times         ,
                                  calculate_buffers_  ,
                                  fork(buffer_writer_,
                                       write_mc      )))
Пример #8
0
def buffer_writer(h5out, *,
                  group_name  : str = 'detsim',
                  compression : str =  'ZLIB4',
                  n_sens_eng  : int =       12,
                  n_sens_trk  : int =     1792,
                  length_eng  : int           ,
                  length_trk  : int           ) -> Callable[[int, List, List, List], None]:
    """
    Generalised buffer writer which defines a raw waveform writer
    for each type of sensor as well as an event info writer
    with written event, timestamp and a mapping to the
    nexus event number in case of event splitting.

    Parameters
    ----------
    h5out       : open PyTables file receiving all arrays/tables.
    group_name  : group under which the waveform arrays are created.
    compression : compression label passed to tbl.filters.
    n_sens_eng  : number of energy-plane sensors (PMTs).
    n_sens_trk  : number of tracking-plane sensors (SiPMs).
    length_eng  : samples per energy-plane buffer.
    length_trk  : samples per tracking-plane buffer.

    Returns
    -------
    write_buffers : closure writing all buffers of one nexus event.
    """

    # Raw-waveform writer for the energy plane ('pmtrd').
    eng_writer = rwf_writer(h5out,
                            group_name      =  group_name,
                            compression     = compression,
                            table_name      =     'pmtrd',
                            n_sensors       =  n_sens_eng,
                            waveform_length =  length_eng)

    # Raw-waveform writer for the tracking plane ('sipmrd').
    trk_writer = rwf_writer(h5out,
                            group_name      =  group_name,
                            compression     = compression,
                            table_name      =    'sipmrd',
                            n_sensors       =  n_sens_trk,
                            waveform_length =  length_trk)

    # Fetch the 'Run' group, creating it on first use (EAFP).
    try:
        evt_group = getattr(h5out.root, 'Run')
    except tb.NoSuchNodeError:
        evt_group = h5out.create_group(h5out.root, 'Run')

    # Maps each written event index to its timestamp and nexus event.
    nexus_evt_tbl = h5out.create_table(evt_group, "events", EventInfo,
                                       "event, timestamp & nexus evt for each index",
                                       tbl.filters(compression))

    def write_buffers(nexus_evt      :        int ,
                      eng_sens_order : List[  int],
                      trk_sens_order : List[  int],
                      timestamps     : List[  int],
                      events         : List[Tuple]) -> None:
        """Write every (possibly split) buffer of one nexus event."""
        for t_stamp, (eng, trk) in zip(timestamps, events):
            row = nexus_evt_tbl.row
            row["event_number"] = write_buffers.counter
            row["timestamp"]    = t_stamp
            row["nexus_evt"]    = nexus_evt
            row.append()

            # Bug fix: np.int (an alias for the builtin int) was
            # deprecated in NumPy 1.20 and removed in 1.24; using the
            # builtin keeps the same platform-int dtype.
            # Zero-filled frames: only sensors present in the ordering
            # lists receive data, the rest stay at zero.
            e_sens = np.zeros((n_sens_eng, length_eng), int)
            t_sens = np.zeros((n_sens_trk, length_trk), int)

            e_sens[eng_sens_order] = eng
            eng_writer(e_sens)

            t_sens[trk_sens_order] = trk
            trk_writer(t_sens)

            write_buffers.counter += 1
    # Running event counter kept as a function attribute (closure state).
    write_buffers.counter = 0
    return write_buffers