Example #1
0
def save_run_info(h5out: tb.file.File, run_number: int) -> None:
    """
    Saves the run number used for the detsim
    format job in the format expected by the
    IC cities.

    h5out      : pytables file
                 The open output file
    run_number : int
                 The run number set in the config
    """
    try:
        run_table = h5out.root.Run.runInfo
    except tb.NoSuchNodeError:
        # First call for this file: create /Run and/or its runInfo table.
        if 'Run' in h5out.root:
            run_group = h5out.root.Run
        else:
            run_group = h5out.create_group(h5out.root, 'Run')
        run_table = h5out.create_table(run_group, "runInfo", RunInfo,
                                       "Run number used in detsim")
    row = run_table.row
    row["run_number"] = run_number
    row.append()
    # Flush so the row leaves the I/O buffer and is visible to readers
    # before the file is closed (matches the flush done in the table writers).
    run_table.flush()
Example #2
0
def copy_sensor_table(h5in_name: str, h5out: tb.file.File) -> None:
    """
    Copies the /Sensors group from the input file into the root of
    the output file, if present; silently does nothing otherwise.

    h5in_name : str
                Name of the input file to read sensor info from
    h5out     : pytables file
                The open output file
    """
    with tb.open_file(h5in_name) as h5in:
        # Keep the try body minimal: only the node lookup can legitimately
        # raise NoSuchNodeError; a failure inside copy_node should propagate.
        try:
            sensor_info = h5in.root.Sensors
        except tb.exceptions.NoSuchNodeError:
            return  # input file carries no sensor info; nothing to copy
        h5out.copy_node(sensor_info, newparent=h5out.root, recursive=True)
Example #3
0
def rwf_writer(h5out: tb.file.File,
               *,
               group_name: str,
               table_name: str,
               compression: str = 'ZLIB4',
               n_sensors: int,
               waveform_length: int) -> Callable:
    """
    Defines group and table where raw waveforms
    will be written.

    h5out           : pytables file
                      Output file where waveforms to be saved
    group_name      : str
                      Name of the group in h5in.root
                      Known options: RD, BLR
                      Setting to None will save directly in root
    table_name      : str
                      Name of the table
                      Known options: pmtrwf, pmtcwf, sipmrwf
    compression     : str
                      file compression
    n_sensors       : int
                      number of sensors in the table (shape[0])
    waveform_length : int
                      Number of samples per sensor
    """
    # Resolve (or create) the parent node for the waveform array.
    if group_name is None:
        parent = h5out.root
    else:
        parent = (getattr(h5out.root, group_name)
                  if group_name in h5out.root
                  else h5out.create_group(h5out.root, group_name))

    waveform_array = h5out.create_earray(parent,
                                         table_name,
                                         atom=tb.Int16Atom(),
                                         shape=(0, n_sensors, waveform_length),
                                         filters=tbl.filters(compression))

    def write_rwf(waveform: np.ndarray) -> None:
        """
        Writes raw waveform arrays to file.
        waveform : np.ndarray
                   shape = (n_sensors, waveform_length) array
                   of sensor charge.
        """
        # EArray grows along axis 0; each event appends one slab.
        waveform_array.append(waveform.reshape(1, n_sensors, waveform_length))

    return write_rwf
Example #4
0
def save_run_info(h5out      : tb.file.File,
                  run_number :          int) -> None:
    """
    Write ``run_number`` into the /Run/runInfo table of ``h5out``,
    creating the group and the table if they do not exist yet.

    h5out      : open pytables output file
    run_number : run number to record
    """
    try:
        run_table = getattr(h5out.root.Run, 'runInfo')
    except tb.NoSuchNodeError:
        # Table is missing; the /Run group itself may or may not exist.
        try:
            run_group = getattr(h5out.root, 'Run')
            run_table = h5out.create_table(run_group, "runInfo", RunInfo,
                                           "Run number used in detsim")
        except tb.NoSuchNodeError:
            # /Run group missing too: create the group first, then the table.
            run_group = h5out.create_group(h5out.root, 'Run')
            run_table = h5out.create_table(run_group, "runInfo", RunInfo,
                                           "Run number used in detsim")
    row = run_table.row
    row["run_number"] = run_number
    row.append()  # NOTE(review): no explicit flush; row is persisted at file close
Example #5
0
def _store_pandas_as_tables(h5out: tb.file.File,
                            df: pd.DataFrame,
                            group_name: str,
                            table_name: str,
                            compression: str = 'ZLIB4',
                            descriptive_string: str = "",
                            str_col_length: int = 32) -> None:
    """
    Append the rows of ``df`` to the table /group_name/table_name in
    ``h5out``, creating the group and the table if needed.

    h5out              : open pytables file for writing
    df                 : DataFrame to be written
    group_name         : group where the table lives (created if absent)
    table_name         : table name (created if absent)
    compression        : compression type
    descriptive_string : table description
    str_col_length     : maximum length in characters of strings
                         NOTE(review): accepted but never forwarded to
                         _make_tabledef here (df_writer does forward it)
                         — confirm whether that is intentional.

    Raises TableMismatch if the dataframe columns differ from an
    already existing table's structure.
    """
    if len(df) == 0:
        warnings.warn('dataframe is empty', UserWarning)

    if group_name not in h5out.root:
        h5out.create_group(h5out.root, group_name)
    group = getattr(h5out.root, group_name)

    if table_name not in group:
        tabledef = _make_tabledef(df.dtypes)
        make_table(h5out,
                   group=group_name,
                   name=table_name,
                   fformat=tabledef,
                   description=descriptive_string,
                   compression=compression)
    table = getattr(group, table_name)

    if not np.array_equal(df.columns, table.colnames):
        raise TableMismatch(
            'dataframe differs from already existing table structure')

    # table.row is the same buffered Row object on every access; hoist it.
    tablerow = table.row
    for indx in df.index:
        for colname in table.colnames:
            tablerow[colname] = df.at[indx, colname]
        tablerow.append()
    table.flush()
Example #6
0
def df_writer(h5out: tb.file.File,
              df: pd.DataFrame,
              group_name: str,
              table_name: str,
              compression: str = 'ZLIB4',
              descriptive_string: str = "",
              str_col_length: int = 32,
              columns_to_index: Optional[Sequence[str]] = None) -> None:
    """ The function writes a dataframe to open pytables file.
    Parameters:
    h5out              : open pytable file for writing
    df                 : DataFrame to be written
    group_name         : group name where table is to be saved
                         (group is created if doesnt exist)
    table_name         : table name
                         (table is created if doesnt exist)
    compression        : compression type
    descriptive_string : table description
    str_col_length     : maximum length in characters of strings
    columns_to_index   : list of columns to be flagged for indexing

    Raises KeyError if columns_to_index names columns absent from df.
    """
    if group_name not in h5out.root:
        h5out.create_group(h5out.root, group_name)
    group = getattr(h5out.root, group_name)

    arr = df.to_records(index=False)

    if table_name not in group:
        tabledef = _make_tabledef(arr.dtype, str_col_length=str_col_length)
        table = make_table(h5out,
                           group=group_name,
                           name=table_name,
                           fformat=tabledef,
                           description=descriptive_string,
                           compression=compression)
    else:
        table = getattr(group, table_name)

    data_types = table.dtype
    if len(arr) == 0:
        warnings.warn('dataframe is empty', UserWarning)
    else:
        _check_castability(arr, data_types)
        # Reorder/cast the record array to the table's column layout.
        columns = list(data_types.names)
        arr = arr[columns].astype(data_types)
        table.append(arr)
        table.flush()

    if columns_to_index is not None:
        if set(columns_to_index).issubset(set(df.columns)):
            table.set_attr('columns_to_index', columns_to_index)
        else:
            not_found = list(set(columns_to_index).difference(set(df.columns)))
            raise KeyError(f'columns {not_found} not present in the dataframe')
Example #7
0
def store_events_reco_counters(oFile: tb.file.File, group_name: str,
                               simulated_events: int, stored_events: int,
                               smE_filter_events: int,
                               fid_filter_events: int) -> None:
    """
    Stores the event counters as attributes of oFile / group_name.

    oFile             : open pytables output file
    group_name        : node whose attributes receive the counters
    simulated_events  : number of simulated events
    stored_events     : number of stored events
    smE_filter_events : events surviving the smE filter
    fid_filter_events : events surviving the fiducial filter
    """
    oFile.set_node_attr(group_name, 'simulated_events', simulated_events)
    oFile.set_node_attr(group_name, 'stored_events', stored_events)
    oFile.set_node_attr(group_name, 'smE_filter_events', smE_filter_events)
    oFile.set_node_attr(group_name, 'fid_filter_events', fid_filter_events)
Example #8
0
def store_events_ana_counters(oFile: tb.file.File, group_name: str,
                              events_df: pd.DataFrame) -> Tuple[int, int, int]:
    """
    Stores the event counters as attributes of oFile / group_name
    and returns them as (tracks, blobs, roi) counts.
    """
    # Each filter column is summed to count the events that survived it.
    n_tracks = events_df.tracks_filter.sum()
    n_blobs  = events_df.blobs_filter .sum()
    n_roi    = events_df.roi_filter   .sum()

    oFile.set_node_attr(group_name, 'tracks_filter_events', n_tracks)
    oFile.set_node_attr(group_name, 'blobs_filter_events' , n_blobs )
    oFile.set_node_attr(group_name, 'roi_filter_events'   , n_roi   )

    return n_tracks, n_blobs, n_roi