示例#1
0
def StackCalSoln_Description(nPos=1):
    """Return a PyTables column-description dict for a stack calibration solution.

    :param nPos: number of dither positions; sets the shape of the
        per-position array columns (startTimes, stopTimes, xPos, yPos).
    :return: dict mapping column name -> ``tables.Col`` instance, suitable
        for passing to ``create_table``.
    """
    strLength = 100  
    description = {
            "intTime"       : tables.UInt16Col(),          # integration time used for each dither position
            "nPos"          : tables.UInt16Col(),          # number of dither positions
            "startTimes"    : tables.UInt32Col(nPos),      # list of data start times for each position
            "stopTimes"     : tables.UInt32Col(nPos),      # list of data stop times for each position (original comment was a copy-paste error)
            "darkSpan"      : tables.UInt32Col(2),         # start and stop time for dark data
            "flatSpan"      : tables.UInt32Col(2),         # start and stop time for flat data
            "xPos"          : tables.UInt16Col(nPos),      # rough guess at X centroid for each position
            "yPos"          : tables.UInt16Col(nPos),      # rough guess at Y centroid for each position
            "numRows"       : tables.UInt16Col(),          # y-dimension of image
            "numCols"       : tables.UInt16Col(),          # x-dimension of image
            "upSample"      : tables.UInt16Col(),          # divide each pixel into upSample^2 for subpix registration
            "padFraction"   : tables.Float64Col(),         # presumably the fractional image padding used for registration -- TODO confirm (original comment was a copy-paste error)
            "coldCut"       : tables.UInt8Col(),           # any pixels with counts<coldCut is set to NAN during regis.
            "fitPos"        : tables.BoolCol(),            # boolean flag to perform fitting for registration
            "target"        : tables.StringCol(strLength), # name of target object
            "date"          : tables.StringCol(strLength), # date of observation
            "imgDir"        : tables.StringCol(strLength), # location of .IMG files
            "binDir"        : tables.StringCol(strLength), # location of .bin files
            "outputDir"     : tables.StringCol(strLength), # location where stack was originally saved
            "fitsPath"      : tables.StringCol(strLength), # full path (dir+filename) for FITS file with stacked image
            "refFile"       : tables.StringCol(strLength), # path to reference file for image registration
            "useImg"        : tables.BoolCol(),            # boolean flag to use .IMG or .bin data files in stacking
            "doHPM"         : tables.StringCol(strLength), # string controlling manner that hot pix masks are used
            "subtractDark"  : tables.BoolCol(),            # boolean flag to apply dark
            "divideFlat"    : tables.BoolCol(),            # boolean flag to apply flat
            "apMaskRadPrim" : tables.UInt16Col(),          # radius of aperture mask around primary object
            "apMaskRadSec"  : tables.UInt16Col()}          # radius of aperture mask around secondary
    return description
示例#2
0
class _Flavors(tables.IsDescription):
    """Storage of neutrino flavor data.

    One boolean flag per flavor channel; all flags default to False.
    """
    nu_e = tables.BoolCol(dflt=False)      # electron neutrino
    nu_e_bar = tables.BoolCol(dflt=False)  # electron antineutrino
    nu_x = tables.BoolCol(dflt=False)      # "x" flavor (presumably mu/tau combined -- confirm)
    nu_x_bar = tables.BoolCol(dflt=False)  # "x" antineutrino
示例#3
0
class _Hierarchy(tables.IsDescription):
    """Storage of neutrino mass hierarchy.

    One boolean flag per hierarchy choice; all flags default to False.
    Column names `any`/`none` intentionally mirror the domain vocabulary
    (they shadow no code -- they are only HDF5 column names).
    """
    any = tables.BoolCol(dflt=False)
    none = tables.BoolCol(dflt=False)
    normal = tables.BoolCol(dflt=False)
    inverted = tables.BoolCol(dflt=False)
示例#4
0
class HMMInfoParticle(tables.IsDescription):
    """PyTables row description holding metadata for one fitted HMM.

    One row per HMM: identifiers, fit parameters and fit-quality metrics.
    """
    #   HMM_ID, taste, din_channel, n_cells, time_start, time_end, thresh,
    #   unit_type, n_repeats, dt, n_states, n_iters, BIC, cost, converged,
    #   area
    hmm_id = tables.Int16Col()
    taste = tables.StringCol(45)
    channel = tables.Int16Col()
    n_cells = tables.Int32Col()
    unit_type = tables.StringCol(15)
    n_trials = tables.Int32Col()
    dt = tables.Float64Col()
    max_iter = tables.Int32Col()
    threshold = tables.Float64Col()
    time_start = tables.Int32Col()
    time_end = tables.Int32Col()
    n_repeats = tables.Int16Col()
    n_states = tables.Int32Col()
    n_iterations = tables.Int32Col()
    BIC = tables.Float64Col()            # Bayesian information criterion of the fit
    cost = tables.Float64Col()
    converged = tables.BoolCol()         # whether the fit converged
    fitted = tables.BoolCol()
    max_log_prob = tables.Float64Col()
    log_likelihood = tables.Float64Col()
    area = tables.StringCol(15)
    hmm_class = tables.StringCol(20)
    notes = tables.StringCol(40)
示例#5
0
    def store_coincidences(self, station_numbers=None):
        """Store the previously found coincidences.

        After having searched for coincidences, you can store the more
        user-friendly results in the ``coincidences`` group using this
        method. It also creates a ``c_index`` and ``s_index`` table to
        find the source events.

        :param station_numbers: optional list of station_numbers.
            If given these will be used to attach correct numbers to the
            station column names in the coincidences table. Otherwise
            they will simply be numbered by id. This list must be the
            same length as the station_groups.

        """
        n_coincidences = len(self._src_c_index)
        if station_numbers is not None:
            if len(station_numbers) != len(self.station_groups):
                raise RuntimeError(
                    "Number of station numbers must equal number of groups.")
            self.station_numbers = station_numbers
            # Station columns start at pos=12, presumably after the fixed
            # columns of storage.Coincidence -- confirm against storage.
            s_columns = {
                's%d' % number: tables.BoolCol(pos=p)
                for p, number in enumerate(station_numbers, 12)
            }
        else:
            self.station_numbers = None
            s_columns = {
                's%d' % n: tables.BoolCol(pos=(n + 12))
                for n, _ in enumerate(self.station_groups)
            }

        # NOTE(review): this mutates the shared storage.Coincidence class
        # in place; repeated calls accumulate station columns.
        description = storage.Coincidence
        description.columns.update(s_columns)
        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences',
                                                   description,
                                                   expectedrows=n_coincidences)

        # _store_coincidence() appends to self.c_index as a side effect.
        self.c_index = []

        for coincidence in pbar(self._src_c_index, show=self.progress):
            self._store_coincidence(coincidence)

        c_index = self.data.create_vlarray(self.coincidence_group,
                                           'c_index',
                                           tables.UInt32Col(shape=2),
                                           expectedrows=n_coincidences)
        for observables_idx in pbar(self.c_index, show=self.progress):
            c_index.append(observables_idx)
        c_index.flush()

        # s_index maps station ids to their group paths (VLStringAtom
        # stores bytestrings, hence the explicit UTF-8 encode).
        s_index = self.data.create_vlarray(self.coincidence_group,
                                           's_index',
                                           tables.VLStringAtom(),
                                           expectedrows=len(
                                               self.station_groups))
        for station_group in self.station_groups:
            s_index.append(station_group.encode('utf-8'))
        s_index.flush()
 class fraud_data(t.IsDescription):
     """PyTables row description for one fraud-evaluation record."""
     transactionIndex = t.UInt16Col()  # points towards the transaction data
     isFraud = t.BoolCol()             # evaluation verdict
     fraudScore = t.Float64Col()       # numeric fraud score
     # BUG FIX: PyTables provides Time32Col/Time64Col but no TimeCol, so
     # t.TimeCol() raised AttributeError at class-creation time.  Time64Col
     # stores a 64-bit POSIX timestamp, matching the column's intent.
     dateEvaluated = t.Time64Col()
     examinedByOperator = t.BoolCol()
     ownerContacted = t.BoolCol()
示例#7
0
class AnonHMMInfoParticle(tables.IsDescription):
    """PyTables row description for metadata of one fitted HMM,
    for stores not tied to a single recording (adds ``rec_dir``).
    """
    #   HMM_ID, taste, din_channel, n_cells, time_start, time_end, thresh,
    #   unit_type, n_repeats, dt, n_states, n_iters, BIC, cost, converged,
    #   area
    # info particle for anonymous hmm data, so if hdf5 store isn't tied to a
    # single recording, adds column for rec_dir
    hmm_id = tables.Int16Col()
    taste = tables.StringCol(45)
    channel = tables.Int32Col()          # NOTE(review): Int32 here vs Int16 in HMMInfoParticle -- confirm intended
    n_cells = tables.Int32Col()
    unit_type = tables.StringCol(15)
    n_trials = tables.Int32Col()
    dt = tables.Float64Col()
    max_iter = tables.Int32Col()
    threshold = tables.Float64Col()
    time_start = tables.Int32Col()
    time_end = tables.Int32Col()
    n_repeats = tables.Int16Col()
    n_states = tables.Int32Col()
    n_iterations = tables.Int32Col()
    BIC = tables.Float64Col()            # Bayesian information criterion of the fit
    cost = tables.Float64Col()
    converged = tables.BoolCol()         # whether the fit converged
    fitted = tables.BoolCol()
    max_log_prob = tables.Float64Col()
    log_likelihood = tables.Float64Col()
    area = tables.StringCol(15)
    hmm_class = tables.StringCol(20)
    notes = tables.StringCol(30)         # NOTE(review): 30 chars here vs 40 in HMMInfoParticle -- confirm intended
    rec_dir = tables.StringCol(150)      # recording directory this HMM belongs to
示例#8
0
 class RecoEvent(tb.IsDescription):
     """PyTables row description for one reconstructed event.

     Float columns default to NaN (meaning "not reconstructed"); counts
     default to 0 and ids to -1.  ``pos`` fixes the on-disk column order.
     """
     obs_id = tb.Int16Col(dflt=-1, pos=0)
     event_id = tb.Int32Col(dflt=-1, pos=1)
     NTels_trig = tb.Int16Col(dflt=0, pos=2)       # telescopes triggered
     NTels_reco = tb.Int16Col(dflt=0, pos=3)       # telescopes used in reconstruction
     NTels_reco_lst = tb.Int16Col(dflt=0, pos=4)
     NTels_reco_mst = tb.Int16Col(dflt=0, pos=5)
     NTels_reco_sst = tb.Int16Col(dflt=0, pos=6)
     pointing_az = tb.Float32Col(dflt=np.nan, pos=7)
     pointing_alt = tb.Float32Col(dflt=np.nan, pos=8)
     true_az = tb.Float32Col(dflt=np.nan, pos=9)
     true_alt = tb.Float32Col(dflt=np.nan, pos=10)
     true_energy = tb.Float32Col(dflt=np.nan, pos=11)
     reco_energy = tb.Float32Col(dflt=np.nan, pos=12)
     reco_alt = tb.Float32Col(dflt=np.nan, pos=13)
     reco_az = tb.Float32Col(dflt=np.nan, pos=14)
     offset = tb.Float32Col(dflt=np.nan, pos=15)
     xi = tb.Float32Col(dflt=np.nan, pos=16)
     ErrEstPos = tb.Float32Col(dflt=np.nan, pos=17)
     ErrEstDir = tb.Float32Col(dflt=np.nan, pos=18)
     gammaness = tb.Float32Col(dflt=np.nan, pos=19)
     success = tb.BoolCol(dflt=False, pos=20)      # reconstruction succeeded
     score = tb.Float32Col(dflt=np.nan, pos=21)
     h_max = tb.Float32Col(dflt=np.nan, pos=22)
     reco_core_x = tb.Float32Col(dflt=np.nan, pos=23)
     reco_core_y = tb.Float32Col(dflt=np.nan, pos=24)
     true_core_x = tb.Float32Col(dflt=np.nan, pos=25)
     true_core_y = tb.Float32Col(dflt=np.nan, pos=26)
     is_valid = tb.BoolCol(dflt=False, pos=27)
class MemoryRegionInfo(tables.IsDescription):
    """PyTables row description for one named memory region."""
    short_name = tables.StringCol(255)
    parent_name = tables.StringCol(255)   # name of the enclosing region -- presumably; confirm
    name = tables.StringCol(255)
    comments = tables.StringCol(512)
    include_children = tables.BoolCol()
    reclassifiable = tables.BoolCol()
    do_print = tables.BoolCol()
示例#10
0
class LightningStatus(tables.IsDescription):
    """PyTables row description for a lightning-detector status message."""
    event_id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=1)
    close_rate = tables.Int16Col(pos=2)
    total_rate = tables.Int16Col(pos=3)
    close_alarm = tables.BoolCol(pos=4)
    # NOTE(review): 'sever_alarm' is presumably a typo for 'severe_alarm',
    # but it is a stored column name -- renaming would break existing files.
    sever_alarm = tables.BoolCol(pos=5)
    current_heading = tables.Float32Col(pos=6)
示例#11
0
class digital_mapping_particle(tables.IsDescription):
    '''Pytables particle for storing digital input/output mappings.

    One row per digital channel with its name and role flags.
    '''
    channel = tables.Int16Col()
    name = tables.StringCol(20)
    palatability_rank = tables.Int16Col()
    laser = tables.BoolCol()
    spike_array = tables.BoolCol()
    exclude = tables.BoolCol()
    # NOTE(review): plural name but a single boolean -- presumably a flag
    # marking "this channel carries laser signals"; confirm with callers.
    laser_channels = tables.BoolCol()
class SkipEntry(tables.IsDescription):
    """PyTables row description for an instruction range to skip.

    Addresses are stored both as a 64-bit column and (presumably) as the
    low/high 32-bit words -- confirm against the writer.
    """
    pc = tables.UInt64Col()
    pclo = tables.UInt32Col()
    pchi = tables.UInt32Col()
    disasm = tables.StringCol(256)     # disassembly text of the skipped instruction
    thumb = tables.BoolCol()           # Thumb-mode address flag
    resumepc = tables.UInt64Col()      # where execution resumes after the skip
    resumepclo = tables.UInt32Col()
    resumepchi = tables.UInt32Col()
    isfunction = tables.BoolCol()
示例#13
0
class Trades(tables.IsDescription):
    """PyTables row description for one executed trade."""
    time = tables.Int64Col()
    trader_id = tables.Int64Col()
    trade_id = tables.Int64Col()
    sequence_id = tables.Int64Col()
    side = tables.Int8Col()            # buy/sell code -- encoding not shown here; confirm
    price = tables.Float64Col()
    quantity = tables.Int64Col()
    origin_id = tables.Int8Col()
    is_auction = tables.BoolCol()
    is_aggressor = tables.BoolCol()
class WriteEntry(tables.IsDescription):
    """PyTables row description for one memory-write instruction.

    ``pclo``/``pchi`` presumably hold the low/high 32-bit words of ``pc``
    -- confirm against the writer.
    """
    pc = tables.UInt64Col()
    pclo = tables.UInt32Col()
    pchi = tables.UInt32Col()
    thumb = tables.BoolCol()           # Thumb-mode address flag
    reg0 = tables.StringCol(4)         # operand register names (up to 4 chars each)
    reg1 = tables.StringCol(4)
    reg2 = tables.StringCol(4)
    reg3 = tables.StringCol(4)
    reg4 = tables.StringCol(4)
    writesize = tables.Int64Col()      # number of bytes written
    halt = tables.BoolCol()  # whether to insert a breakpoint here
示例#15
0
    def store_coincidences(self, cluster=None):
        """Store the previously found coincidences.

        After having searched for coincidences, you can store the more
        user-friendly results in the ``coincidences`` group using this
        method. It also creates a ``c_index`` and ``s_index`` table to
        find the source events.

        :param cluster: optional cluster object.  When given it is stored
            in the group attributes and its station numbers are used for
            the ``s<number>`` column names; otherwise stations are simply
            numbered by their position in ``self.station_groups``.

        """
        n_coincidences = len(self._src_c_index)
        if cluster:
            self.cluster = cluster
            self.coincidence_group._v_attrs.cluster = cluster
            # Station columns start at pos=12, presumably after the fixed
            # columns of storage.Coincidence -- confirm against storage.
            s_columns = {
                's%d' % station.number: tables.BoolCol(pos=p)
                for p, station in enumerate(cluster.stations, 12)
            }
        else:
            self.cluster = None
            s_columns = {
                's%d' % n: tables.BoolCol(pos=(n + 12))
                for n, _ in enumerate(self.station_groups)
            }

        # NOTE(review): this mutates the shared storage.Coincidence class
        # in place; repeated calls accumulate station columns.
        description = storage.Coincidence
        description.columns.update(s_columns)
        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences',
                                                   description,
                                                   expectedrows=n_coincidences)

        # _store_coincidence() appends to self.c_index as a side effect.
        self.c_index = []

        for coincidence in pbar(self._src_c_index, show=self.progress):
            self._store_coincidence(coincidence)

        c_index = self.data.create_vlarray(self.coincidence_group,
                                           'c_index',
                                           tables.UInt32Col(shape=2),
                                           expectedrows=n_coincidences)
        for observables_idx in pbar(self.c_index, show=self.progress):
            c_index.append(observables_idx)
        c_index.flush()

        s_index = self.data.create_vlarray(self.coincidence_group,
                                           's_index',
                                           tables.VLStringAtom(),
                                           expectedrows=len(
                                               self.station_groups))
        for station_group in self.station_groups:
            # BUG FIX: VLStringAtom stores bytestrings, so appending a str
            # fails on Python 3.  Encode as UTF-8, consistent with the
            # other implementation of this method in this file.
            s_index.append(station_group.encode('utf-8'))
        s_index.flush()
示例#16
0
class CustomRecord(tables.IsDescription):
    """PyTables row description with a nested ``data`` sub-description."""
    class data(tables.IsDescription):
        # Nested group: a 3-vector velocity and a scalar density per row.
        velocity = tables.Float64Col(pos=1, shape=3)
        density = tables.Float64Col(pos=2)

    # Per-row boolean mask of length 3 -- presumably one flag per
    # velocity component; confirm with callers.
    mask = tables.BoolCol(pos=1, shape=(3, ))
示例#17
0
    def _create_reconstruction_group_and_tables(self, results_group,
                                                overwrite):
        """Create the results group with its reconstruction tables.

        Creates ``reconstructions`` and ``coincidences`` tables inside a
        freshly created group, each extended with one boolean column per
        station (named ``s<number>``).

        :param results_group: path of the group to (re)create.
        :param overwrite: when True an existing group is removed first.
        :raises RuntimeError: if the group exists and overwrite is False.
        :return: the newly created group node.
        """
        if results_group in self.data:
            if overwrite:
                self.data.remove_node(results_group, recursive=True)
            else:
                raise RuntimeError(
                    "Result group exists, but overwrite is False")

        head, tail = os.path.split(results_group)
        group = self.data.create_group(head, tail)
        # One boolean column per station, named s<number>.
        stations_description = {
            's%d' % u: tables.BoolCol()
            for u in self.stations
        }

        # NOTE(review): update() mutates the shared description dicts
        # (self.reconstruction_description etc.) in place.
        description = self.reconstruction_description
        description.update(stations_description)
        self.reconstruction = self.data.create_table(group, 'reconstructions',
                                                     description)

        description = self.reconstruction_coincidence_description
        description.update(stations_description)
        self.reconstruction_coincidences = \
            self.data.create_table(group, 'coincidences', description)

        return group
示例#18
0
class Spectrum_12_4096(tb.IsDescription):
    """ PyTables table descriptor: storage of spectral data.

    NOTE(review): the class name and the xx/yy column shapes indicate
    4096 spectral channels, but the original docstring referred to the
    hipsr_200_16384 firmware (200 MHz, 16384 channels) -- presumably a
    copy-paste leftover from a sibling descriptor; confirm before
    relying on the firmware association.
    """
    id = tb.Int32Col(pos=0)  # Unique ID
    timestamp = tb.Time64Col(pos=1)  # Timestamp (at BRAM read)
    xx = tb.UInt32Col(shape=4096, pos=2)  # XX Autocorrelation data
    yy = tb.UInt32Col(shape=4096, pos=3)  # YY Autocorrelation data
    # re_xy      = tb.Int32Col(shape=16384,pos=4)   # XY Cross correlation - real
    #im_xy      = tb.Int32Col(shape=16384,pos=5)   # XY Cross correlation - imag
    xx_cal_on = tb.Int32Col(shape=16, pos=6)  # Noise diode ON, X pol
    xx_cal_off = tb.Int32Col(shape=16, pos=7)  # Noise diode OFF, X pol
    yy_cal_on = tb.Int32Col(shape=16, pos=8)  # Noise diode ON, Y pol
    yy_cal_off = tb.Int32Col(shape=16, pos=9)  # Noise diode OFF, Y pol
    fft_of = tb.BoolCol(pos=10)  # FFT overflow flag
    adc_clip = tb.BoolCol(pos=11)  # ADC clipping flag
示例#19
0
File: sg_fig.py  Project: rueberger/MJHMC
class LadderTableSchema(tables.IsDescription):
    """PyTables row description for one rung of a sampler parameter ladder."""
    epsilon = tables.Float32Col()             # step size
    num_leapfrog_steps = tables.Int32Col()
    beta = tables.Float32Col()                # (inverse) temperature -- confirm convention
    ladder_idx = tables.Int32Col()            # position within the ladder
    # NOTE(review): a "hash" stored as Float32 is unusual -- confirm the
    # writer really produces a float here.
    distr_hash = tables.Float32Col()
    mjhmc = tables.BoolCol()                  # whether the MJHMC sampler was used
示例#20
0
    def _prepare_coincidence_tables(self):
        """Create coincidence tables

        These are the same as the tables created by
        :class:`~sapphire.analysis.coincidences.CoincidencesESD`.
        This makes it easy to link events detected by multiple stations.

        """
        self.coincidence_group = self.data.create_group(self.output_path,
                                                        'coincidences',
                                                        createparents=True)
        try:
            self.coincidence_group._v_attrs.cluster = self.cluster
        except tables.HDF5ExtError:
            # BUG FIX: corrected typo in warning text ("to large").
            warnings.warn('Unable to store cluster object, too large for HDF.')

        # Station columns start at pos=12, presumably after the fixed
        # columns of storage.Coincidence -- confirm against storage.
        # NOTE(review): columns.update() mutates the shared
        # storage.Coincidence class in place.
        description = storage.Coincidence
        s_columns = {
            's%d' % station.number: tables.BoolCol(pos=p)
            for p, station in enumerate(self.cluster.stations, 12)
        }
        description.columns.update(s_columns)

        self.coincidences = self.data.create_table(self.coincidence_group,
                                                   'coincidences', description)

        self.c_index = self.data.create_vlarray(self.coincidence_group,
                                                'c_index',
                                                tables.UInt32Col(shape=2))

        self.s_index = self.data.create_vlarray(self.coincidence_group,
                                                's_index',
                                                tables.VLStringAtom())
示例#21
0
def test_make_tabledef(empty_dataframe):
    """The tabledef built from an empty frame's dtype matches the expected columns."""
    records_dtype = empty_dataframe.to_records(index=False).dtype
    tabledef = _make_tabledef(records_dtype, 32)

    expected = {
        'int_value': tb.Int32Col(shape=(), dflt=0, pos=0),
        'float_value': tb.Float32Col(shape=(), dflt=0, pos=1),
        'bool_value': tb.BoolCol(shape=(), dflt=False, pos=2),
        'str_value': tb.StringCol(itemsize=32, shape=(), dflt=b'', pos=3),
    }
    assert tabledef == expected
示例#22
0
class Record(tb.IsDescription):
    """PyTables row description exercising every basic column type.

    Extended-precision float/complex columns are declared conditionally:
    the corresponding ``tb.*Col`` classes only exist when the underlying
    platform/NumPy build supports those types.
    """
    var1 = tb.StringCol(itemsize=4, dflt=b"abcd", pos=0)
    var2 = tb.StringCol(itemsize=1, dflt=b"a", pos=1)
    var3 = tb.BoolCol(dflt=1)
    var4 = tb.Int8Col(dflt=1)
    var5 = tb.UInt8Col(dflt=1)
    var6 = tb.Int16Col(dflt=1)
    var7 = tb.UInt16Col(dflt=1)
    var8 = tb.Int32Col(dflt=1)
    var9 = tb.UInt32Col(dflt=1)
    var10 = tb.Int64Col(dflt=1)
    var11 = tb.Float32Col(dflt=1.0)
    var12 = tb.Float64Col(dflt=1.0)
    var13 = tb.ComplexCol(itemsize=8, dflt=(1.+0.j))   # complex64
    var14 = tb.ComplexCol(itemsize=16, dflt=(1.+0.j))  # complex128
    # Optional types, present only on supporting builds:
    if hasattr(tb, 'Float16Col'):
        var15 = tb.Float16Col(dflt=1.0)
    if hasattr(tb, 'Float96Col'):
        var16 = tb.Float96Col(dflt=1.0)
    if hasattr(tb, 'Float128Col'):
        var17 = tb.Float128Col(dflt=1.0)
    if hasattr(tb, 'Complex196Col'):
        var18 = tb.ComplexCol(itemsize=24, dflt=(1.+0.j))
    if hasattr(tb, 'Complex256Col'):
        var19 = tb.ComplexCol(itemsize=32, dflt=(1.+0.j))
示例#23
0
def _create_coincidences_tables(file, group, station_groups):
    """Setup coincidence tables

    :param file: PyTables file handle to create the tables in.
    :param group: path of the parent group.
    :param station_groups: mapping of station number to station info;
        values are dicts with at least a 'group' path (iterated with
        ``itervalues`` below).
    :return: the created coincidences group.

    """
    coin_group = group + '/coincidences'

    # Create coincidences table
    # Station columns start at pos=12, presumably after the fixed columns
    # of storage.Coincidence -- confirm against storage.
    # NOTE(review): columns.update() mutates the shared storage.Coincidence
    # class in place.
    description = storage.Coincidence
    s_columns = {
        's%d' % station: tables.BoolCol(pos=p)
        for p, station in enumerate(station_groups, 12)
    }
    description.columns.update(s_columns)
    coincidences = file.create_table(coin_group,
                                     'coincidences',
                                     description,
                                     createparents=True)

    # Create c_index
    file.create_vlarray(coin_group, 'c_index', tables.UInt32Col(shape=2))

    # Create and fill s_index (VLStringAtom stores bytestrings, hence the
    # explicit UTF-8 encode)
    s_index = file.create_vlarray(coin_group, 's_index', tables.VLStringAtom())
    for station_group in itervalues(station_groups):
        s_index.append(station_group['group'].encode('utf-8'))

    return coincidences._v_parent
示例#24
0
    def prepare_output(self):
        """Prepare output table

        Creates the reconstructions table at the destination path, extended
        with one boolean column per station (named ``s<number>``), and
        attaches the cluster object to the table attributes.

        :raises RuntimeError: if the destination exists and overwrite is
            False.

        """
        dest_path = os.path.join(self.dest_group, self.destination)

        if dest_path in self.dest_data:
            if self.overwrite:
                self.dest_data.remove_node(dest_path, recursive=True)
            else:
                raise RuntimeError("Reconstructions table already exists for "
                                   "%s, and overwrite is False" %
                                   self.dest_group)

        # Station columns start at pos=26, presumably after the fixed
        # columns of ReconstructedCoincidence -- confirm against storage.
        s_columns = {
            's%d' % station.number: tables.BoolCol(pos=p)
            for p, station in enumerate(self.cluster.stations, 26)
        }
        # NOTE(review): columns.update() mutates the shared
        # ReconstructedCoincidence class in place.
        description = ReconstructedCoincidence
        description.columns.update(s_columns)
        self.reconstructions = self.dest_data.create_table(
            self.dest_group,
            self.destination,
            description,
            expectedrows=self.coincidences.nrows,
            createparents=True)
        try:
            self.reconstructions._v_attrs.cluster = self.cluster
        except tables.HDF5ExtError:
            # BUG FIX: corrected typo in warning text ("to large").
            warnings.warn('Unable to store cluster object, too large for HDF.')
class LongWrites(tables.IsDescription):
    """PyTables row description for one looped ("long") memory write.

    Several addresses are stored both as a 64-bit column and as separate
    low/high 32-bit word columns (the *lo/*hi pairs -- presumably the low
    and high words of the matching 64-bit column; confirm against the
    writer).  The original per-column comments were copy-pasted from
    ``breakaddr`` and have been corrected.
    """
    breakaddr = tables.UInt64Col()  # where write loop starts
    breakaddrlo = tables.UInt32Col()  # low 32 bits of breakaddr
    breakaddrhi = tables.UInt32Col()  # high 32 bits of breakaddr
    writeaddr = tables.UInt64Col()  # destination address of the write -- presumably; confirm
    writeaddrlo = tables.UInt32Col()  # low 32 bits of writeaddr
    writeaddrhi = tables.UInt32Col()  # high 32 bits of writeaddr
    contaddr = tables.UInt64Col()  # pc after loop
    thumb = tables.BoolCol()  # if write is at thumb address
    inplace = tables.BoolCol()
    writesize = tables.UInt64Col()  # total bytes written -- presumably; confirm
    start = tables.UInt64Col()
    startlo = tables.UInt32Col()  # low 32 bits of start
    starthi = tables.UInt32Col()  # high 32 bits of start
    end = tables.UInt64Col()
    endlo = tables.UInt32Col()  # low 32 bits of end
    endhi = tables.UInt32Col()  # high 32 bits of end
示例#26
0
class LightningConfig(tables.IsDescription):
    """PyTables row description for a lightning-detector configuration record."""
    event_id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=1)
    com_port = tables.UInt8Col(pos=2)
    baud_rate = tables.Int16Col(pos=3)
    station_id = tables.UInt32Col(pos=4)
    database_name = tables.Int32Col(dflt=-1, pos=5)   # -1 = not set
    help_url = tables.Int32Col(dflt=-1, pos=6)        # -1 = not set
    daq_mode = tables.BoolCol(pos=7)
    latitude = tables.Float64Col(pos=8)
    longitude = tables.Float64Col(pos=9)
    altitude = tables.Float64Col(pos=10)
    # NOTE(review): 'squelch_seting' is presumably a typo for
    # 'squelch_setting', but it is a stored column name -- renaming would
    # break existing files.
    squelch_seting = tables.Int32Col(pos=11)
    close_alarm_distance = tables.Int32Col(pos=12)
    severe_alarm_distance = tables.Int32Col(pos=13)
    noise_beep = tables.BoolCol(pos=14)
    minimum_gps_speed = tables.Int32Col(pos=15)
    angle_correction = tables.Float32Col(pos=16)
示例#27
0
class ReconstructedEvent(ReconstructedCoincidence):
    """Store information about reconstructed events

    .. attribute:: id

        Index referring to the id of the event that was reconstructed.

    .. attribute:: d1,d2,d3,d4

        Booleans indicating which detectors participated in the
        reconstruction.

    """

    # Positions 22-25 presumably continue after the parent
    # ReconstructedCoincidence columns -- confirm against that class.
    d1 = tables.BoolCol(pos=22)
    d2 = tables.BoolCol(pos=23)
    d3 = tables.BoolCol(pos=24)
    d4 = tables.BoolCol(pos=25)
示例#28
0
class FramaCDstEntry(tables.IsDescription):
    """PyTables row description for one Frama-C write-destination result."""
    line = tables.StringCol(512)  # file/lineno
    lvalue = tables.StringCol(512)  # lvalue as reported by framac
    dstlo = tables.UInt32Col()  # low value of write dst range
    dsthi = tables.UInt32Col()  # high value of write dst range
    dst_not_in_ram = tables.BoolCol()  # true if range is not RAM
    writepc = tables.UInt32Col()  # corresponding store instruction PC to src line (if just 1)
    origpc = tables.UInt32Col()  # corresponding store instruction PC to src line (if just 1)
    substage = tables.UInt8Col()  # boot substage this write belongs to -- presumably; confirm
示例#29
0
 class Search(tb.IsDescription):
     """PyTables row description for one search-benchmark measurement.

     Paired columns (time1/time2, tcpu1/tcpu2, rowsec1/rowsec2) presumably
     hold two repeated measurements of the same quantity -- confirm with
     the benchmark driver.
     """
     nrows = tb.Int32Col(pos=0)
     rowsel = tb.Int32Col(pos=1)     # number of selected rows -- presumably; confirm
     time1 = tb.Float64Col(pos=2)    # wall-clock time
     time2 = tb.Float64Col(pos=3)
     tcpu1 = tb.Float64Col(pos=4)    # CPU time
     tcpu2 = tb.Float64Col(pos=5)
     rowsec1 = tb.Float64Col(pos=6)  # rows per second
     rowsec2 = tb.Float64Col(pos=7)
     psyco = tb.BoolCol(pos=8)       # whether the psyco JIT was enabled
示例#30
0
class HDF5Frame(tables.IsDescription):
    """PyTables row description for one captured frame.

    The id/rtr/length/8-byte-data layout looks like a CAN bus frame --
    presumably; confirm with the capture code.
    """
    timestamp = tables.UInt64Col()  # Microseconds
    sentinel_start = tables.UInt8Col()   # framing sentinel byte
    sender_timestamp = tables.UInt32Col()
    sequence = tables.UInt16Col()
    id = tables.UInt16Col()         # frame identifier
    rtr = tables.BoolCol()          # remote-transmission-request flag -- presumably; confirm
    length = tables.UInt8Col()      # number of valid bytes in `data`
    data = tables.UInt8Col(shape=(8, ))
    sentinel_end = tables.UInt8Col()     # framing sentinel byte