Example no. 1
class EventObservables(tables.IsDescription):
    """Store information about the observables of an event.

    The observables are described for each station independently.  So, for each
    event (with a unique :attr:`id`), there is a table row for each station
    (with a unique :attr:`station_id`), such that only the (id, station_id)
    combinations are unique in the table.

    .. attribute:: id

        a unique identifier for the simulated event (only unique in this table)

    .. attribute:: station_id

        station identifier, such that you can do::

            >>> station = cluster.stations[station_id]

    .. attribute:: r, phi, x, y

        coordinates of the station.  Depending on the simulation, this might be
        constant throughout the simulation, or it might change event by event.

    .. attribute:: alpha

        rotation of the station around its center

    .. attribute:: N

        number of detectors with at least one particle

    """
    id = tables.UInt32Col()
    station_id = tables.UInt8Col()
    timestamp = tables.Time32Col()
    nanoseconds = tables.UInt32Col()
    ext_timestamp = tables.UInt64Col()

    r = tables.Float32Col()
    phi = tables.Float32Col()
    x = tables.Float32Col()
    y = tables.Float32Col()
    alpha = tables.Float32Col()
    N = tables.UInt8Col()
    t1 = tables.Float32Col()
    t2 = tables.Float32Col()
    t3 = tables.Float32Col()
    t4 = tables.Float32Col()
    n1 = tables.Float32Col()
    n2 = tables.Float32Col()
    n3 = tables.Float32Col()
    n4 = tables.Float32Col()
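
A minimal usage sketch for a description like this one (the file name and the /simulation group are made up for illustration): create the table and append one row per participating station for a single event, which gives the (id, station_id) layout described in the docstring. Unassigned columns keep their zero defaults.

import tables

# Hypothetical sketch: one row per (id, station_id) combination.
with tables.open_file('observables_demo.h5', 'w') as h5:
    table = h5.create_table('/simulation', 'observables', EventObservables,
                            createparents=True)
    row = table.row
    for station_id in (0, 1, 2):
        row['id'] = 1               # same event id for every station
        row['station_id'] = station_id
        row['N'] = 2                # detectors with at least one particle
        row.append()
    table.flush()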
Example no. 2
class TraceWriteRange(tables.IsDescription):
    index = tables.UInt32Col()
    destlo = tables.UInt32Col()
    desthi = tables.UInt32Col()
    pc = tables.UInt32Col()
    relocatedpc = tables.UInt32Col()
    lr = tables.UInt32Col()
    relocatedlr = tables.UInt32Col()
    byteswritten = tables.UInt32Col()
    numops = tables.UInt32Col()
    cpsr = tables.UInt32Col()
    caller = tables.StringCol(40)
    substage = tables.UInt8Col()
Example no. 3
class HitInfoTable(tb.IsDescription):
    event_number = tb.Int64Col(pos=0)
    trigger_number = tb.UInt32Col(pos=1)
    relative_BCID = tb.UInt8Col(pos=2)
    LVL1ID = tb.UInt16Col(pos=3)
    column = tb.UInt8Col(pos=4)
    row = tb.UInt16Col(pos=5)
    tot = tb.UInt8Col(pos=6)
    BCID = tb.UInt16Col(pos=7)
    TDC = tb.UInt16Col(pos=8)
    TDC_time_stamp = tb.UInt8Col(pos=9)
    trigger_status = tb.UInt8Col(pos=10)
    service_record = tb.UInt32Col(pos=11)
    event_status = tb.UInt16Col(pos=12)
Example no. 4
class TriggerInfo(tables.IsDescription):
    """
	Describe the trigger informations of the telescopes events
	Attributes:
	-----------
		event_id : id of the corresponding event
		time_s : time of the event in second since 1st january 1970
		time_qns : time in nanosecond (or picosecond) to complete the time in second
		obs_id : id of the observation
	"""
    event_id = tables.UInt64Col()
    time_s = tables.UInt32Col()
    time_qns = tables.UInt32Col()
    obs_id = tables.UInt64Col()
Example no. 5
class GenomeTable(tables.IsDescription):
    NCBITaxonId = tables.UInt32Col(pos=0)
    UniProtSpeciesCode = tables.StringCol(5, pos=1)
    TotEntries = tables.UInt32Col(pos=2)
    TotAA = tables.UInt32Col(pos=3)
    EntryOff = tables.UInt32Col(pos=4)
    SciName = tables.StringCol(255, pos=5)
    CommonName = tables.StringCol(64, pos=6)
    SynName = tables.StringCol(64, pos=7)
    Release = tables.StringCol(128, pos=8)
    Url = tables.StringCol(255, pos=9)
    Source = tables.StringCol(255, pos=10)
    Date = tables.Time32Col(pos=11)
    IsPolyploid = tables.BoolCol(pos=12)
Example no. 6
class ClusterInfoTable(tb.IsDescription):
    event_number = tb.Int64Col(pos=0)
    cluster_id = tb.UInt16Col(pos=1)
    n_hits = tb.UInt32Col(pos=2)
    charge = tb.Float32Col(pos=3)
    frame = tb.UInt16Col(pos=4)
    seed_column = tb.UInt16Col(pos=5)
    seed_row = tb.UInt16Col(pos=6)
    mean_column = tb.Float64Col(pos=7)
    mean_row = tb.Float64Col(pos=8)
    err_column = tb.Float32Col(pos=9)
    err_row = tb.Float32Col(pos=10)
    n_cluster = tb.UInt32Col(pos=11)
    cluster_shape = tb.Int64Col(pos=12)
Example no. 7
class cells(tb.IsDescription):
    galID = tb.StringCol(8)  # 8-character string
    expn = tb.StringCol(8)
    cellID = tb.UInt32Col()  # Unsigned integer
    xPos = tb.Float32Col()
    yPos = tb.Float32Col()
    zPos = tb.Float32Col()
    lognH = tb.Float32Col()
    logT = tb.Float32Col()
    SNII = tb.Float32Col()
    cellsize = tb.Float32Col()  # single-precision float (32 bits)
    HI = tb.UInt32Col()
    MgII = tb.UInt32Col()
    CIV = tb.UInt32Col()
    OVI = tb.UInt32Col()
Example no. 8
class HisparcEvent(tables.IsDescription):
    # DISCUSS: use of signed (dflt -1) vs unsigned (labview code)
    event_id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=2)
    nanoseconds = tables.UInt32Col(pos=3)
    ext_timestamp = tables.UInt64Col(pos=4)
    data_reduction = tables.BoolCol(pos=5)
    trigger_pattern = tables.UInt32Col(pos=6)
    baseline = tables.Int16Col(shape=4, dflt=-1, pos=7)
    std_dev = tables.Int16Col(shape=4, dflt=-1, pos=8)
    n_peaks = tables.Int16Col(shape=4, dflt=-1, pos=9)
    pulseheights = tables.Int16Col(shape=4, dflt=-1, pos=10)
    integrals = tables.Int32Col(shape=4, dflt=-1, pos=11)
    traces = tables.Int32Col(shape=4, dflt=-1, pos=12)
    event_rate = tables.Float32Col(pos=13)
Example no. 9
class Summary(tables.IsDescription):
    bin_number = tables.UInt32Col(pos=0)
    chromosome = tables.StringCol(chromosome_name_length, pos=2)
    avg_cell_type_percentile = tables.Float64Col(pos=1)
    cell_types_gte_95th_percentile = tables.UInt32Col(pos=3)
    cell_types_lt_95th_percentile = tables.UInt32Col(pos=4)
    lines_gte_95th_percentile = tables.UInt32Col(pos=5)
    lines_lt_95th_percentile = tables.UInt32Col(pos=6)
    cell_types_gte_5th_percentile = tables.UInt32Col(pos=7)
    cell_types_lt_5th_percentile = tables.UInt32Col(pos=8)
    lines_gte_5th_percentile = tables.UInt32Col(pos=9)
    lines_lt_5th_percentile = tables.UInt32Col(pos=10)
Example no. 10
    def _prepare_coincidence_tables(self):
        """Create coincidence tables

        These are the same as the tables created by
        :class:`~sapphire.analysis.coincidences.CoincidencesESD`.
        This makes it easy to link events detected by multiple stations.

        """
        self.coincidence_group = self.data.create_group(self.output_path,
                                                        'coincidences',
                                                        createparents=True)
        try:
            self.coincidence_group._v_attrs.cluster = self.cluster
        except tables.HDF5ExtError:
            warnings.warn('Unable to store cluster object, too large for HDF.')

        description = storage.Coincidence
        s_columns = {
            's%d' % station.number: tables.BoolCol(pos=p)
            for p, station in enumerate(self.cluster.stations, 12)
        }
        description.columns.update(s_columns)

        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences', description)

        self.c_index = self.data.create_vlarray(self.coincidence_group,
                                                'c_index',
                                                tables.UInt32Col(shape=2))

        self.s_index = self.data.create_vlarray(self.coincidence_group,
                                                's_index',
                                                tables.VLStringAtom())
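
The column-injection pattern above (updating the `columns` dict of an IsDescription subclass before the table is created) can be shown in isolation. The snippet below is a hedged sketch with a made-up stand-in description instead of storage.Coincidence; the file name and station numbers are hypothetical.

import tables

class MiniCoincidence(tables.IsDescription):
    # Simplified stand-in for storage.Coincidence, for illustration only.
    id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=1)

# Add one boolean column per station, as the method above does.
MiniCoincidence.columns.update({
    's%d' % number: tables.BoolCol(pos=p)
    for p, number in enumerate([501, 502, 503], 12)
})

with tables.open_file('coincidence_columns_demo.h5', 'w') as h5:
    table = h5.create_table('/', 'coincidences', MiniCoincidence)
    print(table.colnames)  # ['id', 'timestamp', 's501', 's502', 's503']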
Example no. 11
    def test_create_weather_table(self):
        description = {
            'event_id': tables.UInt32Col(pos=0),
            'timestamp': tables.Time32Col(pos=1),
            'temp_inside': tables.Float32Col(pos=2),
            'temp_outside': tables.Float32Col(pos=3),
            'humidity_inside': tables.Int16Col(pos=4),
            'humidity_outside': tables.Int16Col(pos=5),
            'barometer': tables.Float32Col(pos=6),
            'wind_dir': tables.Int16Col(pos=7),
            'wind_speed': tables.Int16Col(pos=8),
            'solar_rad': tables.Int16Col(pos=9),
            'uv': tables.Int16Col(pos=10),
            'evapotranspiration': tables.Float32Col(pos=11),
            'rain_rate': tables.Float32Col(pos=12),
            'heat_index': tables.Int16Col(pos=13),
            'dew_point': tables.Float32Col(pos=14),
            'wind_chill': tables.Float32Col(pos=15)
        }
        file = MagicMock()
        result = esd._create_weather_table(file, sentinel.group)
        file.create_table.assert_called_once_with(sentinel.group,
                                                  'weather',
                                                  description,
                                                  createparents=True)
        self.assertEqual(result, file.create_table.return_value)
Example no. 12
class ReconstructedKascadeEvent(tables.IsDescription):
    """Store information about reconstructed events"""

    # r, phi is core position

    id = tables.UInt32Col()
    station_id = tables.UInt8Col()
    r = tables.Float32Col()
    phi = tables.Float32Col()
    alpha = tables.Float32Col()
    t1 = tables.Float32Col()
    t2 = tables.Float32Col()
    t3 = tables.Float32Col()
    t4 = tables.Float32Col()
    n1 = tables.Float32Col()
    n2 = tables.Float32Col()
    n3 = tables.Float32Col()
    n4 = tables.Float32Col()
    reference_theta = tables.Float32Col()
    reference_phi = tables.Float32Col()
    reconstructed_theta = tables.Float32Col()
    reconstructed_phi = tables.Float32Col()
    min_n134 = tables.Float32Col()

    k_energy = tables.FloatCol()
    k_core_pos = tables.FloatCol(shape=2)
    k_Num_e = tables.FloatCol()
    k_Num_mu = tables.FloatCol()
    k_dens_e = tables.FloatCol(shape=4)
    k_dens_mu = tables.FloatCol(shape=4)
    k_P200 = tables.FloatCol()
    k_T200 = tables.FloatCol()
Example no. 13
class ReconstructedCoincidence(tables.IsDescription):
    """Store information about reconstructed coincidences"""

    id = tables.UInt32Col(pos=1)
    ext_timestamp = tables.UInt64Col(pos=2)
    min_n = tables.Float32Col(pos=3)

    x = tables.Float32Col(pos=4)
    y = tables.Float32Col(pos=5)
    zenith = tables.Float32Col(pos=6)
    azimuth = tables.Float32Col(pos=7)
    size = tables.Float32Col(pos=8)
    energy = tables.Float32Col(pos=9)
    error_x = tables.Float32Col(pos=10)
    error_y = tables.Float32Col(pos=11)
    error_zenith = tables.Float32Col(pos=12)
    error_azimuth = tables.Float32Col(pos=13)
    error_size = tables.Float32Col(pos=14)
    error_energy = tables.Float32Col(pos=15)

    reference_x = tables.Float32Col(pos=16)
    reference_y = tables.Float32Col(pos=17)
    reference_zenith = tables.Float32Col(pos=18)
    reference_azimuth = tables.Float32Col(pos=19)
    reference_size = tables.Float32Col(pos=20)
    reference_energy = tables.Float32Col(pos=21)
Example no. 14
class BlastTable(SimpleTable):
    query      = tables.StringCol(5)
    subject    = tables.StringCol(5)

    pctid      = tables.Float32Col()
    hitlen     = tables.UInt16Col()
    nmismatch  = tables.UInt16Col()
    ngaps      = tables.UInt16Col()

    qstart     = tables.UInt32Col()
    qstop      = tables.UInt32Col()
    sstart     = tables.UInt32Col()
    sstop      = tables.UInt32Col()

    evalue     = tables.Float64Col()
    score      = tables.Float32Col()
Example no. 15
def _create_singles_table(file, group):
    """Create singles table in PyTables file

    Create a singles table containing the ESD singles columns which are
    available in the TSV download.

    :param file: PyTables file.
    :param group: the group to contain the singles table, which need not
                  exist.

    """
    description = {
        'event_id': tables.UInt32Col(pos=0),
        'timestamp': tables.Time32Col(pos=1),
        'mas_ch1_low': tables.Int32Col(pos=2),
        'mas_ch1_high': tables.Int32Col(pos=3),
        'mas_ch2_low': tables.Int32Col(pos=4),
        'mas_ch2_high': tables.Int32Col(pos=5),
        'slv_ch1_low': tables.Int32Col(pos=6),
        'slv_ch1_high': tables.Int32Col(pos=7),
        'slv_ch2_low': tables.Int32Col(pos=8),
        'slv_ch2_high': tables.Int32Col(pos=9)
    }

    return file.create_table(group, 'singles', description, createparents=True)
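
A hedged usage sketch for the helper above; the file name and station group path are made up, and the group does not need to exist beforehand because create_table is called with createparents=True.

import tables

# Hypothetical call: '/s501' is created on demand.
with tables.open_file('singles_demo.h5', 'w') as data_file:
    singles = _create_singles_table(data_file, '/s501')
    print(singles.colnames)  # starts with ['event_id', 'timestamp', ...]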
Example no. 16
class KalmanEstimatesVelOnlyWithDirectionPositionCovariance(PT.IsDescription):
    obj_id = PT.UInt32Col(pos=0)
    frame = PT.Int64Col(pos=1)
    timestamp = PT.Float64Col(pos=2)  # time of reconstruction
    x = PT.Float32Col(pos=3)
    y = PT.Float32Col(pos=4)
    z = PT.Float32Col(pos=5)
    xvel = PT.Float32Col(pos=6)
    yvel = PT.Float32Col(pos=7)
    zvel = PT.Float32Col(pos=8)
    # save parts of P matrix
    P00 = PT.Float32Col(pos=9)
    P01 = PT.Float32Col(pos=10)
    P02 = PT.Float32Col(pos=11)
    P11 = PT.Float32Col(pos=12)
    P12 = PT.Float32Col(pos=13)
    P22 = PT.Float32Col(pos=14)
    P33 = PT.Float32Col(pos=15)
    P44 = PT.Float32Col(pos=16)
    P55 = PT.Float32Col(pos=17)
    # save estimated direction of long body axis
    rawdir_x = PT.Float32Col(pos=18)
    rawdir_y = PT.Float32Col(pos=19)
    rawdir_z = PT.Float32Col(pos=20)
    dir_x = PT.Float32Col(pos=21)
    dir_y = PT.Float32Col(pos=22)
    dir_z = PT.Float32Col(pos=23)
Example no. 17
    def store_coincidences(self):
        if '/coincidences' not in self.data:
            group = self.data.createGroup('/', 'coincidences')
            group._v_attrs.cluster = self.cluster

            self.c_index = []
            self.coincidences = self.data.createTable(group, 'coincidences',
                                                      storage.Coincidence)
            self.observables = self.data.createTable(group, 'observables',
                                                     storage.EventObservables)

            progress = pb.ProgressBar(
                widgets=[pb.Percentage(), pb.Bar(),
                         pb.ETA()])
            for coincidence in progress(self.data.root.c_index):
                self.store_coincidence(coincidence)

            c_index = self.data.createVLArray(group, 'c_index',
                                              tables.UInt32Col())
            for coincidence in self.c_index:
                c_index.append(coincidence)
            c_index.flush()
            self.c_index = c_index
        else:
            # Force new cluster geometry
            group = self.data.getNode('/', 'coincidences')
            group._v_attrs.cluster = self.cluster
Example no. 18
    def store_coincidences(self, cluster=None):
        """Store the previously found coincidences.

        After you have searched for coincidences, you can store the
        more user-friendly results in the coincidences group using this
        method.

        :param cluster: optionally store a
            :class:`~sapphire.clusters.BaseCluster` instance in the
            coincidences group for future reference.

        """
        if cluster:
            self.coincidence_group._v_attrs.cluster = cluster

        self.c_index = []
        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences',
                                                   storage.Coincidence)
        self.observables = self.data.create_table(self.coincidence_group,
                                                  'observables',
                                                  storage.EventObservables)

        for coincidence in pbar(self.coincidence_group._src_c_index,
                                show=self.progress):
            self._store_coincidence(coincidence)

        c_index = self.data.create_vlarray(self.coincidence_group, 'c_index',
                                           tables.UInt32Col())
        for coincidence in self.c_index:
            c_index.append(coincidence)
        c_index.flush()
        self.c_index = c_index
Example no. 19
    def store_coincidences(self):
        """Store the previously found coincidences.

        After you have searched for coincidences, you can store the
        more user-friendly results in the coincidences group using this
        method.

        """
        self.c_index = []
        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences',
                                                   storage.Coincidence)
        self.observables = self.data.create_table(self.coincidence_group,
                                                  'observables',
                                                  storage.EventObservables)

        for coincidence in pbar(self.coincidence_group._src_c_index,
                                show=self.progress):
            self._store_coincidence(coincidence)

        c_index = self.data.create_vlarray(self.coincidence_group, 'c_index',
                                           tables.UInt32Col())
        for coincidence in self.c_index:
            c_index.append(coincidence)
        c_index.flush()
        self.c_index = c_index
Example no. 20
    def store_coincidences(self):
        print "Storing coincidences..."
        if '/coincidences' not in self.data:
            group = self.data.create_group('/', 'coincidences')
            group._v_attrs.cluster = self.cluster

            self.c_index = []
            self.coincidences = self.data.create_table(group, 'coincidences',
                                                       storage.Coincidence)
            self.observables = self.data.create_table(group, 'observables',
                                                      storage.EventObservables)

            for coincidence in pbar(self.data.root.c_index):
                self.store_coincidence(coincidence)

            c_index = self.data.create_vlarray(group, 'c_index',
                                               tables.UInt32Col())
            for coincidence in self.c_index:
                c_index.append(coincidence)
            c_index.flush()
            self.c_index = c_index
        else:
            # Force new cluster geometry
            group = self.data.get_node('/', 'coincidences')
            group._v_attrs.cluster = self.cluster
Example no. 21
class LightningEvent(tables.IsDescription):
    event_id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=1)
    corr_distance = tables.Int16Col(pos=2)
    uncorr_distance = tables.Int16Col(pos=3)
    uncorr_angle = tables.Float32Col(pos=4)
    corr_angle = tables.Float32Col(pos=5)
Example no. 22
def state_to_table_dtype(
        state, str_buffer_safety_factor=DTYPE_STR_BUFFER_SAFETY_FACTOR):
    """
    Get a state of an object represented as a dictionary and derive the
    appropriate type of a tables.Table.

    :param state:       The state of an object.
    :type state:        dict
    :raises:            :class:`pymc.UnknownTypeException`
    """
    dtype_dict = {}
    for name in state.keys():
        if isinstance(state[name], int):
            dtype = pt.UInt32Col()
        elif isinstance(state[name], float):
            dtype = pt.Float64Col()
        elif isinstance(state[name], str):
            dtype = pt.StringCol(itemsize=len(state[name]) *
                                 str_buffer_safety_factor)
        elif isinstance(state[name], np.ndarray):
            dtype = pt.Float64Col(shape=state[name].shape)
        else:
            raise UnknownTypeException(
                'I cannot deal with the type of %s (%s)' %
                (name, type(state[name])))
        dtype_dict[name] = dtype
    return dtype_dict
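
A short illustration of the mapping this helper performs, using a made-up state dictionary and passing the safety factor explicitly (the module-level default is defined elsewhere in the original source): ints become UInt32Col, floats Float64Col, strings get an oversized StringCol, and numpy arrays become shaped Float64Col columns.

import numpy as np
import tables as pt

# Hypothetical example state; the resulting dict can be used directly as a
# tables.Table description.
state = {'n_steps': 10, 'scale': 0.5, 'label': 'run-01',
         'samples': np.zeros((3, 2))}
description = state_to_table_dtype(state, str_buffer_safety_factor=2)
# e.g. description['label'] is a StringCol with itemsize 12,
# description['samples'] is a Float64Col with shape (3, 2)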
Example no. 23
class Spectrum_12_4096(tb.IsDescription):
    """ PyTables table descriptor: storage of spectral data
    For hipsr_200_16384 firmware: 200 MHz, 16384 channels
    """
    id = tb.Int32Col(pos=0)  # Unique ID
    timestamp = tb.Time64Col(pos=1)  # Timestamp (at BRAM read)
    xx = tb.UInt32Col(shape=4096, pos=2)  # XX Autocorrelation data
    yy = tb.UInt32Col(shape=4096, pos=3)  # YY Autocorrelation data
    # re_xy      = tb.Int32Col(shape=16384,pos=4)   # XY Cross correlation - real
    #im_xy      = tb.Int32Col(shape=16384,pos=5)   # XY Cross correlation - imag
    xx_cal_on = tb.Int32Col(shape=16, pos=6)  # Noise diode ON, X pol
    xx_cal_off = tb.Int32Col(shape=16, pos=7)  # Noise diode OFF, X pol
    yy_cal_on = tb.Int32Col(shape=16, pos=8)  # Noise diode ON, Y pol
    yy_cal_off = tb.Int32Col(shape=16, pos=9)  # Noise diode OFF, Y pol
    fft_of = tb.BoolCol(pos=10)  # FFT overflow flag
    adc_clip = tb.BoolCol(pos=11)  # ADC clipping flag
Example no. 24
def _create_weather_table(file, group):
    """Create weather table in PyTables file

    Create a weather table containing the ESD weather columns which are
    available in the TSV download.

    :param file: PyTables file.
    :param group: the group to contain the weather table, which need not
                  exist.

    """
    description = {
        'event_id': tables.UInt32Col(pos=0),
        'timestamp': tables.Time32Col(pos=1),
        'temp_inside': tables.Float32Col(pos=2),
        'temp_outside': tables.Float32Col(pos=3),
        'humidity_inside': tables.Int16Col(pos=4),
        'humidity_outside': tables.Int16Col(pos=5),
        'barometer': tables.Float32Col(pos=6),
        'wind_dir': tables.Int16Col(pos=7),
        'wind_speed': tables.Int16Col(pos=8),
        'solar_rad': tables.Int16Col(pos=9),
        'uv': tables.Int16Col(pos=10),
        'evapotranspiration': tables.Float32Col(pos=11),
        'rain_rate': tables.Float32Col(pos=12),
        'heat_index': tables.Int16Col(pos=13),
        'dew_point': tables.Float32Col(pos=14),
        'wind_chill': tables.Float32Col(pos=15)
    }

    return file.create_table(group, 'weather', description, createparents=True)
Example no. 25
class Record(tb.IsDescription):
    var1 = tb.StringCol(itemsize=4, dflt=b"abcd", pos=0)
    var2 = tb.StringCol(itemsize=1, dflt=b"a", pos=1)
    var3 = tb.BoolCol(dflt=1)
    var4 = tb.Int8Col(dflt=1)
    var5 = tb.UInt8Col(dflt=1)
    var6 = tb.Int16Col(dflt=1)
    var7 = tb.UInt16Col(dflt=1)
    var8 = tb.Int32Col(dflt=1)
    var9 = tb.UInt32Col(dflt=1)
    var10 = tb.Int64Col(dflt=1)
    var11 = tb.Float32Col(dflt=1.0)
    var12 = tb.Float64Col(dflt=1.0)
    var13 = tb.ComplexCol(itemsize=8, dflt=(1.+0.j))
    var14 = tb.ComplexCol(itemsize=16, dflt=(1.+0.j))
    if hasattr(tb, 'Float16Col'):
        var15 = tb.Float16Col(dflt=1.0)
    if hasattr(tb, 'Float96Col'):
        var16 = tb.Float96Col(dflt=1.0)
    if hasattr(tb, 'Float128Col'):
        var17 = tb.Float128Col(dflt=1.0)
    if hasattr(tb, 'Complex196Col'):
        var18 = tb.ComplexCol(itemsize=24, dflt=(1.+0.j))
    if hasattr(tb, 'Complex256Col'):
        var19 = tb.ComplexCol(itemsize=32, dflt=(1.+0.j))
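
Since every column above declares a dflt, a row can be appended without assigning any field. A minimal sketch (file name made up):

import tables as tb

# Hypothetical sketch: an "empty" append stores the declared defaults.
with tb.open_file('record_defaults.h5', 'w') as h5:
    table = h5.create_table('/', 'records', Record)
    table.row.append()       # row filled entirely from the dflt values
    table.flush()
    print(table[0]['var1'])  # b'abcd'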
Example no. 26
class HisparcEvent(tables.IsDescription):
    """HiSPARC event table description."""

    event_id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=1)
    nanoseconds = tables.UInt32Col(pos=2)
    ext_timestamp = tables.UInt64Col(pos=3)
    data_reduction = tables.BoolCol(pos=4)
    trigger_pattern = tables.UInt32Col(pos=5)
    baseline = tables.Int16Col(shape=4, dflt=-1, pos=6)
    std_dev = tables.Int16Col(shape=4, dflt=-1, pos=7)
    n_peaks = tables.Int16Col(shape=4, dflt=-1, pos=8)
    pulseheights = tables.Int16Col(shape=4, dflt=-1, pos=9)
    integrals = tables.Int32Col(shape=4, dflt=-1, pos=10)
    traces = tables.Int32Col(shape=4, dflt=-1, pos=11)
    event_rate = tables.Float32Col(pos=12)
Example no. 27
def _create_coincidences_tables(file, group, station_groups):
    """Setup coincidence tables

    :return: the created coincidences group.

    """
    coin_group = group + '/coincidences'

    # Create coincidences table
    description = storage.Coincidence
    s_columns = {
        's%d' % station: tables.BoolCol(pos=p)
        for p, station in enumerate(station_groups, 12)
    }
    description.columns.update(s_columns)
    coincidences = file.create_table(coin_group,
                                     'coincidences',
                                     description,
                                     createparents=True)

    # Create c_index
    file.create_vlarray(coin_group, 'c_index', tables.UInt32Col(shape=2))

    # Create and fill s_index
    s_index = file.create_vlarray(coin_group, 's_index', tables.VLStringAtom())
    for station_group in itervalues(station_groups):
        s_index.append(station_group['group'].encode('utf-8'))

    return coincidences._v_parent
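
A hedged sketch of how this helper might be called, assuming the `storage` module and the `itervalues` helper it relies on are importable. The station_groups shape below (station number mapped to a dict with a 'group' path) is inferred from the loops in the function; the paths and file name are made up.

import tables

# Hypothetical station_groups mapping: keys become the s<number> columns,
# values provide the HDF5 path appended to s_index.
station_groups = {
    501: {'group': '/cluster/station_501'},
    502: {'group': '/cluster/station_502'},
}
with tables.open_file('coincidences_demo.h5', 'w') as data_file:
    coincidences_group = _create_coincidences_tables(data_file, '/analysis',
                                                     station_groups)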
Example no. 28
class Lumitable(t.IsDescription):
    fillnum = t.UInt32Col(shape=(), dflt=0, pos=0)
    runnum = t.UInt32Col(shape=(), dflt=0, pos=1)
    lsnum = t.UInt32Col(shape=(), dflt=0, pos=2)
    nbnum = t.UInt32Col(shape=(), dflt=0, pos=3)
    #timestampsec = t.Float32Col(shape=(3564), dflt=0, pos=4)
    #timestampsec = t.UInt32Col(shape=(3564,), dflt=0, pos=4)
    timestampsec = t.UInt32Col(shape=(), dflt=0, pos=4)
    #totsize = t.UInt32Col(shape=(), dflt=0, pos=6)
    #publishnnb = t.UInt8Col(shape=(), dflt=0, pos=7)
    #avg = t.Float32Col(shape=(), dflt=0.0, pos=14)

    avgraw = t.Float32Col(shape=(), dflt=0.0, pos=5)  #new
    avg = t.Float32Col(shape=(), dflt=0.0, pos=6)  #new
    bxraw = t.Float32Col(shape=(3564, ), dflt=0.0, pos=7)  #new
    bx = t.Float32Col(shape=(3564, ), dflt=0.0, pos=8)  #new
Example no. 29
    def store_coincidences(self, station_numbers=None):
        """Store the previously found coincidences.

        After having searched for coincidences, you can store the more
        user-friendly results in the ``coincidences`` group using this
        method. It also creates ``c_index`` and ``s_index`` tables to
        find the source events.

        :param station_numbers: optional list of station_numbers.
            If given these will be used to attach correct numbers to the
            station column names in the coincidences table. Otherwise
            they will simply be numbered by id. This list must be the
            same length as the station_groups.

        """
        n_coincidences = len(self._src_c_index)
        if station_numbers is not None:
            if len(station_numbers) != len(self.station_groups):
                raise RuntimeError(
                    "Number of station numbers must equal number of groups.")
            self.station_numbers = station_numbers
            s_columns = {
                's%d' % number: tables.BoolCol(pos=p)
                for p, number in enumerate(station_numbers, 12)
            }
        else:
            self.station_numbers = None
            s_columns = {
                's%d' % n: tables.BoolCol(pos=(n + 12))
                for n, _ in enumerate(self.station_groups)
            }

        description = storage.Coincidence
        description.columns.update(s_columns)
        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences',
                                                   description,
                                                   expectedrows=n_coincidences)

        self.c_index = []

        for coincidence in pbar(self._src_c_index, show=self.progress):
            self._store_coincidence(coincidence)

        c_index = self.data.create_vlarray(self.coincidence_group,
                                           'c_index',
                                           tables.UInt32Col(shape=2),
                                           expectedrows=n_coincidences)
        for observables_idx in pbar(self.c_index, show=self.progress):
            c_index.append(observables_idx)
        c_index.flush()

        s_index = self.data.create_vlarray(self.coincidence_group,
                                           's_index',
                                           tables.VLStringAtom(),
                                           expectedrows=len(
                                               self.station_groups))
        for station_group in self.station_groups:
            s_index.append(station_group.encode('utf-8'))
        s_index.flush()
Example no. 30
class ReconstructedEvent(tables.IsDescription):
    """Store information about reconstructed events"""

    # r, phi is core position

    id = tables.UInt32Col()
    station_id = tables.UInt8Col()
    r = tables.Float32Col()
    phi = tables.Float32Col()
    alpha = tables.Float32Col()
    t1 = tables.Float32Col()
    t2 = tables.Float32Col()
    t3 = tables.Float32Col()
    t4 = tables.Float32Col()
    n1 = tables.Float32Col()
    n2 = tables.Float32Col()
    n3 = tables.Float32Col()
    n4 = tables.Float32Col()
    reference_theta = tables.Float32Col()
    reference_phi = tables.Float32Col()
    reconstructed_theta = tables.Float32Col()
    reconstructed_phi = tables.Float32Col()
    reference_core_pos = tables.Float32Col(shape=2)
    reconstructed_core_pos = tables.Float32Col(shape=2)
    reference_shower_size = tables.Float32Col()
    reconstructed_shower_size = tables.Float32Col()
    min_n134 = tables.Float32Col()