class Particle(IsDescription):
    """PyTables row description for a 2-D particle: id, mass, position and velocity."""

    particle_id = Int32Col()  # integer identifier for the particle
    mass = Float64Col()
    x_pos = Float64Col()      # position components
    y_pos = Float64Col()
    x_vel = Float64Col()      # velocity components
    y_vel = Float64Col()
class LocusTable(tables.IsDescription):
    """PyTables row description for one locus/variant record."""

    id = UInt64Col(pos=0)
    name = StringCol(512, pos=1)  # 'species_name.chromosome_name.locus_name'
    chromosome_id = UInt64Col(pos=2)  # unsigned 64-bit id (original comment said "64 byte-long")
    start = UInt64Col(pos=3)
    stop = UInt64Col(pos=4)
    ref_allele = StringCol(64, pos=5)
    # Length stored separately: it can exceed 64, which matters when the
    # reference allele is simply too long for the 64-byte string field.
    ref_allele_length = UInt64Col(pos=6)
    ref_allele_frequency = Float64Col(pos=7)
    alt_allele = StringCol(64, pos=8)
    # Same rationale as ref_allele_length, for the alternative allele.
    alt_allele_length = UInt64Col(pos=9)
    alt_allele_frequency = Float64Col(pos=10)
    generation_mutation_arose = Int64Col(pos=11)
    generation_mutation_fixed = Int64Col(pos=12)
    # 0=synonymous/non-coding, 1=non-synonymous, i=insertion, d=deletion, v=inversion
    mutation_type = StringCol(4, pos=13)
    fitness = Float64Col(pos=14)
    ancestral_amino_acid = StringCol(4, pos=15)  # only for mutation_type 0/1
    derived_amino_acid = StringCol(4, pos=16)    # only for mutation_type 0/1
def _create_h5(filename):
    """Create an HDF5 file with a '/partition' group holding a 5-row
    'rows' table (field1..field3) and a one-row 'file_header' table.

    Args:
        filename (str): path of the HDF5 file to create (truncated if it exists).
    """
    with open_file(filename, 'w') as h5:
        descriptor1 = {
            'field1': Float64Col(),
            'field2': Float64Col(),
            'field3': Float64Col(),
        }
        descriptor2 = {
            'version': Int32Col(),
            'n_rows': Int32Col(),
            'n_cols': Int32Col(),
        }
        h5.create_group('/partition', 'meta', createparents=True)
        rows_table = h5.create_table('/partition', 'rows', descriptor1)
        file_header_table = h5.create_table('/partition', 'file_header', descriptor2)
        for i in range(5):
            rows_table.row['field1'] = float(i)
            rows_table.row['field2'] = float(i)
            # Bug fix: 'field2' was assigned twice and 'field3' was never
            # populated (it silently kept the column default).
            rows_table.row['field3'] = float(i)
            rows_table.row.append()
        rows_table.flush()
        file_header_table.row['version'] = 1
        file_header_table.row['n_rows'] = 1
        file_header_table.row['n_cols'] = 1
        file_header_table.row.append()
        file_header_table.flush()
class GameSummaryAgent(IsDescription):
    """Per-agent, per-game summary row.

    NOTE(review): string widths come from module-level ``name_size`` and
    ``outcome_size`` constants defined elsewhere in this file.
    """

    name = StringCol(name_size, pos=0)
    version = Int32Col(pos=1)
    game_number = Int32Col(pos=2)
    rating = Float64Col(pos=3, dflt=0.0)
    outcome = StringCol(outcome_size, pos=4)
    total_time = Float64Col(pos=5)
    time_med = Float64Col(pos=6)
    time_max = Float64Col(pos=7)
    state_size_med = Int32Col(pos=8)
    state_size_max = Int32Col(pos=9)
class Particle(IsDescription):
    """Particle pose/orientation record; every column is double precision.

    NOTE(review): the original per-column comments all read "Signed 64-bit
    integer", but each column is a Float64Col (64-bit float); the comments
    were copy-paste artifacts and have been removed.
    """

    x = Float64Col()
    y = Float64Col()
    z = Float64Col()
    speed = Float64Col()
    pitch = Float64Col()
    yaw = Float64Col()
    heading_pitch = Float64Col()
    heading_yaw = Float64Col()
    rotation_pitch = Float64Col()
    rotation_yaw = Float64Col()
class Table(IsDescription):
    """Observation row keyed by a Time64 timestamp."""

    dt = Time64Col()     # observation timestamp
    C1 = Float64Col()
    P1 = Float64Col()
    P2 = Float64Col()
    L1 = Float64Col()
    L2 = Float64Col()
    az = Float64Col()    # presumably azimuth — confirm against writer
    el = Float64Col()    # presumably elevation
    satx = Float64Col()  # satellite position components (frame not shown here)
    saty = Float64Col()
    satz = Float64Col()
class Table(IsDescription):
    """Observation row keyed by a Time64 timestamp."""

    dt = Time64Col()        # observation timestamp
    sobs = Float64Col()
    sprn = Float64Col()
    az = Float64Col()       # presumably azimuth — confirm against writer
    el = Float64Col()       # presumably elevation
    satx = Float64Col()     # satellite position components (frame not shown here)
    saty = Float64Col()
    satz = Float64Col()
    el_map = Float64Col()
    ipp_lat = Float64Col()  # presumably ionospheric-pierce-point latitude — confirm
    ipp_lon = Float64Col()
class McmcParameters(IsDescription):
    """
    Class that holds the data model for the HDF5 table with the MCMC parameters.
    """
    iterations = Int32Col()                 # total MCMC iterations
    burnIn = Int32Col()                     # iterations discarded as burn-in
    thin = Int32Col()                       # thinning interval
    minMeanAbsoluteMagnitude = Float64Col()
    maxMeanAbsoluteMagnitude = Float64Col()
    priorTau = StringCol(40)                # name of the prior used for tau
    shapeTau = Float64Col()
    scaleTau = Float64Col()
class CountAssociationLocusTable(tables.IsDescription):
    """
    2012.12.24 new PyTables-based table definition
    """
    id = UInt64Col(pos=0)
    min_score = Float64Col(pos=1)
    min_overlap_ratio = Float64Col(pos=2)
    total_no_of_results = UInt64Col(pos=3)
    no_of_association_loci = UInt64Col(pos=4)
    # id lists serialized into fixed-width strings (format set by the writer)
    call_method_id_ls = StringCol(1000, pos=5)
    cnv_method_id_ls = StringCol(1000, pos=6)
    phenotype_method_id_ls = StringCol(1000, pos=7)
    analysis_method_id_ls = StringCol(1000, pos=8)
class AssociationTable(tables.IsDescription):
    """
    2012.12.18 pytable class to store the genome-wide association result
    """
    id = UInt64Col(pos=0)
    locus_id = UInt64Col(pos=1, dflt=0)
    chromosome = StringCol(64, pos=2, dflt='')  # 64-byte string
    start = UInt64Col(pos=3, dflt=0)
    stop = UInt64Col(pos=4, dflt=0)
    score = Float64Col(pos=5, dflt=-1)
    mac = Int64Col(pos=6, dflt=-1)
    maf = Float64Col(pos=7, dflt=-1)
    genotype_var_perc = Float64Col(pos=8, dflt=-1)
    beta_list = Float64Col(shape=(5,), pos=9, dflt=-1)  # 2013.1.9
    # Bug fix: pos was duplicated as 9 (same as beta_list); column
    # positions must be unique, so beta_pvalue_list now sits at pos=10.
    beta_pvalue_list = Float64Col(shape=(5,), pos=10, dflt=-1)
def _get_rows_descriptor(columns): """ Converts columns specifications from ambry_sources format to pytables descriptor. Args: columns (list of dict) Returns: dict: valid pytables descriptor. """ TYPE_MAP = { 'int': lambda pos: Int32Col(pos=pos), 'long': lambda pos: Int64Col(pos=pos), 'str': lambda pos: StringCol(itemsize=255, pos=pos), 'bytes': lambda pos: StringCol(itemsize=255, pos=pos), 'float': lambda pos: Float64Col(pos=pos), 'unknown': lambda pos: StringCol(itemsize=255, pos=pos), } descriptor = {} for column in columns: pytables_type = TYPE_MAP.get(column['type']) if not pytables_type: raise Exception( 'Failed to convert `{}` ambry_sources type to pytables type.'. format(column['type'])) descriptor[column['name']] = pytables_type(column['pos']) return descriptor
class SSReadoutTableDs(IsDescription): iro = UInt64Col() # readout numner/index time = UInt64Col() # TACK timestamp cpu_t = Float64Col() # native python timestamp float64 cpu_t_s = UInt64Col() # seconds time stamp uint64 cpu_t_ns = UInt64Col() # nano seconds time stamp uint64 data = Float32Col((N_TM, N_TM_PIX)) # 2D data array containing
class Record(tables.IsDescription):
    """Row description exercising every column type, including the
    extended-precision ones that exist only on some platforms/builds."""

    var1 = StringCol(itemsize=4, dflt=b"abcd", pos=0)
    var2 = StringCol(itemsize=1, dflt=b"a", pos=1)
    var3 = BoolCol(dflt=1)
    var4 = Int8Col(dflt=1)
    var5 = UInt8Col(dflt=1)
    var6 = Int16Col(dflt=1)
    var7 = UInt16Col(dflt=1)
    var8 = Int32Col(dflt=1)
    var9 = UInt32Col(dflt=1)
    var10 = Int64Col(dflt=1)
    var11 = Float32Col(dflt=1.0)
    var12 = Float64Col(dflt=1.0)
    var13 = ComplexCol(itemsize=8, dflt=(1. + 0.j))
    var14 = ComplexCol(itemsize=16, dflt=(1. + 0.j))
    if hasattr(tables, 'Float16Col'):
        var15 = tables.Float16Col(dflt=1.0)
    if hasattr(tables, 'Float96Col'):
        var16 = tables.Float96Col(dflt=1.0)
    if hasattr(tables, 'Float128Col'):
        var17 = tables.Float128Col(dflt=1.0)
    # Bug fix: the guard checked 'Complex196Col', a name PyTables never
    # defines (the float96-based complex column is Complex192Col), so
    # var18 was unconditionally skipped even where it is supported.
    if hasattr(tables, 'Complex192Col'):
        var18 = tables.ComplexCol(itemsize=24, dflt=(1. + 0.j))
    if hasattr(tables, 'Complex256Col'):
        var19 = tables.ComplexCol(itemsize=32, dflt=(1. + 0.j))
class AssociationLandscapeTable(tables.IsDescription):
    """Row description for one association-landscape segment."""

    id = UInt64Col(pos=0)
    start_locus_id = UInt64Col(pos=1)
    stop_locus_id = UInt64Col(pos=2)
    no_of_loci = UInt64Col(pos=3)
    deltaX = UInt64Col(pos=4)
    deltaY = Float64Col(pos=5)
class AgentVersionRow(IsDescription):
    """Per-version win/loss/draw record for an agent."""

    version = Int32Col(pos=0, dflt=0)
    won = Int32Col(pos=1, dflt=0)
    lost = Int32Col(pos=2, dflt=0)
    drawn = Int32Col(pos=3, dflt=0)
    failed = Int32Col(pos=4, dflt=0)
    rating = Float64Col(pos=5, dflt=0.0)
    # NOTE(review): TimeStamp is not a standard PyTables Col class;
    # presumably defined elsewhere in this project — confirm.
    uploaded = TimeStamp()
class IndividualTable(tables.IsDescription):
    """Row description for one individual."""

    id = UInt64Col(pos=0)
    family_id = StringCol(512, pos=1)  # 512-byte string (original "#64 byte-long" comment was wrong)
    # name should look like 'species_name.population_name.individual_name'
    # to ensure uniqueness
    name = StringCol(512, pos=2)
    father_name = StringCol(512, pos=3)
    mother_name = StringCol(512, pos=4)
    sex = UInt64Col(pos=5)  # 0 is unknown, 1=male, 2=female
    phenotype = Float64Col(pos=6)
    population_id = UInt64Col(pos=7)
class TwoAssociationLocusOverlapTable(tables.IsDescription):
    """
    2012.12.24 new PyTables-based table definition
    """
    id = UInt64Col(pos=0)
    chromosome = StringCol(64, pos=1)  # 64-byte string
    start = UInt64Col(pos=2)
    stop = UInt64Col(pos=3)
    fractionCoveredByAssociation2 = Float64Col(pos=4)
class Particle(IsDescription): name = StringCol(16) # 16-character String idnumber = Int64Col() # Signed 64-bit integer ADCcount = UInt16Col() # Unsigned short integer TDCcount = UInt8Col() # unsigned byte grid_i = Int32Col() # 32-bit integer grid_j = Int32Col() # 32-bit integer pressure = Float32Col() # float (single-precision) energy = Float64Col() # double (double-precision)
def test_reads_lines_to_dict(self):
    """_read_meta_child returns the meta 'about' rows as a list of dicts."""
    scratch_fs = fsopendir('temp://')
    # Write a meta.about table containing one row of known values.
    about_descriptor = {'load_time': Float64Col(), 'create_time': Float64Col()}
    expected_row = [('load_time', 1.0), ('create_time', 1.1)]
    self._write_test_meta(scratch_fs, 'about', about_descriptor, [expected_row])
    # Read it back and check both fields survived the round trip.
    with open_file(scratch_fs.getsyspath('temp.h5'), 'r') as h5:
        result = HDFReader._read_meta_child(h5, 'about')
    self.assertIsInstance(result, list)
    first_row = result[0]
    for key, value in expected_row:
        self.assertIn(key, first_row)
        self.assertEqual(first_row[key], value)
class AssociationPeakTable(tables.IsDescription):
    """Row description for one association peak."""

    id = UInt64Col(pos=0)
    chromosome = StringCol(64, pos=1)  # 64-byte string
    start = UInt64Col(pos=2)
    stop = UInt64Col(pos=3)
    start_locus_id = UInt64Col(pos=4)
    stop_locus_id = UInt64Col(pos=5)
    no_of_loci = UInt64Col(pos=6)
    peak_locus_id = UInt64Col(pos=7)
    peak_score = Float64Col(pos=8)
def _save_source(self):
    """Persist the 'source' metadata child with its column layout."""
    # All string columns share one construction; only the widths differ.
    string_widths = {
        'encoding': 255,
        'url': 1024,
        'file_type': 50,
        'inner_file': 255,
        'url_type': 255,
    }
    descriptor = {name: StringCol(itemsize=width)
                  for name, width in string_widths.items()}
    descriptor['fetch_time'] = Float64Col()
    self._save_meta_child('source', descriptor)
class Table(IsDescription):
    """Observation row keyed by a Time64 timestamp."""

    dt = Time64Col()     # observation timestamp
    stec = Float64Col()
    sprn = Float64Col()
    az = Float64Col()    # presumably azimuth — confirm against writer
    el = Float64Col()    # presumably elevation
    satx = Float64Col()  # satellite position components (frame not shown here)
    saty = Float64Col()
    satz = Float64Col()
def setup_peakmap_table(self):
    """Create and index the 'pm_table' node unless it already exists."""
    if hasattr(self.node, "pm_table"):
        return
    # Column layout: (name, column) pairs in their intended order.
    column_spec = [
        ("unique_id", StringCol(itemsize=64, pos=0)),
        ("index", UInt32Col(pos=1)),
        ("ms_levels", StringCol(itemsize=self.MSLEVEL_FIELD_SIZE, pos=2)),
        ("rtmin_1", Float32Col(pos=3)),
        ("rtmax_1", Float32Col(pos=4)),
        ("rtmin_2", Float32Col(pos=5)),
        ("rtmax_2", Float32Col(pos=6)),
        ("mzmin_1", Float64Col(pos=7)),
        ("mzmax_1", Float64Col(pos=8)),
        ("mzmin_2", Float64Col(pos=9)),
        ("mzmax_2", Float64Col(pos=10)),
    ]
    description = dict(column_spec)
    pm_table = self.file_.create_table(self.node, 'pm_table', description,
                                       filters=filters)
    # Every column which appears in a where() call should/must be indexed!
    # This is not only for performance but for correct lookup as well
    # (strange bugs showed up otherwise).
    pm_table.cols.unique_id.create_index()
    pm_table.cols.index.create_index()
class AssociationLocusTable(tables.IsDescription):
    """
    2013.1.28 bugfix, was pos=3 for no_of_peaks (same as stop),
        now change it to pos=4, and increment others accordingly
    """
    id = UInt64Col(pos=0)
    chromosome = StringCol(64, pos=1)  # 64-byte string
    start = UInt64Col(pos=2)
    stop = UInt64Col(pos=3)
    no_of_peaks = UInt64Col(pos=4)
    connectivity = Float64Col(pos=5)
    no_of_results = UInt64Col(pos=6)
    # phenotype id list serialized into a fixed-width string
    phenotype_id_ls_in_str = StringCol(1000, pos=7)
class SurveyData(IsDescription):
    """
    Class that holds the data model for the data from the simulated parallax
    survey. Intended for use with the HDF5 files through the pytables package.
    """
    # Each column is a float64 array with one entry per star, sized by the
    # module-level survey object S.
    trueParallaxes = Float64Col(S.numberOfStarsInSurvey)
    absoluteMagnitudes = Float64Col(S.numberOfStarsInSurvey)
    apparentMagnitudes = Float64Col(S.numberOfStarsInSurvey)
    parallaxErrors = Float64Col(S.numberOfStarsInSurvey)
    magnitudeErrors = Float64Col(S.numberOfStarsInSurvey)
    observedParallaxes = Float64Col(S.numberOfStarsInSurvey)
    observedMagnitudes = Float64Col(S.numberOfStarsInSurvey)
class Equity(IsDescription):
    """Row description for one equity bhavcopy record (OHLC plus volumes)."""

    SYMBOL = StringCol(50)
    SERIES = StringCol(2)
    OPEN = Float32Col()
    HIGH = Float32Col()
    LOW = Float32Col()
    CLOSE = Float32Col()
    LAST = Float32Col()
    PREVCLOSE = Float32Col()
    TOTTRDQTY = Int32Col()      # total traded quantity
    TOTTRDVAL = Float64Col()    # total traded value
    TIMESTAMP = StringCol(12)   # stored as text, not a Time column
    TOTALTRADES = Int32Col()
    ISIN = StringCol(12)
def init_static_fields(self, shape, fields):
    """Create the 'static_fields' table and persist the given field values.

    Args:
        shape: shape argument handed to Float64Col for every field column.
        fields (dict): field name -> value; each value is stored through
            this object's item assignment.
    """
    self.static_fields = set(fields)
    cols_desc = {name: Float64Col(shape) for name in fields}
    self.tab_static_fields = self.f.create_table(
        self.f.root, 'static_fields', cols_desc, 'Static fields')
    # One (empty) row so the values have somewhere to live.
    self.tab_static_fields.row.append()
    self.tab_static_fields.flush()
    # Store each provided value via __setitem__.
    for name, value in fields.items():
        self[name] = value
def init_dynamic_scalars_and_fields(self, shape, scalars, fields):
    """Create the 'dynamic_scalars' and 'dynamic_fields' tables, then take
    an initial snapshot at t=0.

    Args:
        shape: shape argument handed to Float64Col for every field column.
        scalars (dict): scalar name -> initial value.
        fields (dict): field name -> initial value.
    """
    # Scalars table: one Float64 column per scalar plus a Float32 't' column.
    self.dynamic_scalars = set(scalars)
    scalar_desc = {name: Float64Col() for name in scalars}
    scalar_desc['t'] = Float32Col()
    self.tab_dynamic_scalars = self.f.create_table(
        self.f.root, 'dynamic_scalars', scalar_desc, 'Dynamic scalars')
    self.tab_dynamic_scalars.row.append()
    self.tab_dynamic_scalars.flush()
    # Fields table: one shaped Float64 column per field plus the 't' column.
    self.dynamic_fields = set(fields)
    field_desc = {name: Float64Col(shape) for name in fields}
    field_desc['t'] = Float32Col()
    self.tab_dynamic_fields = self.f.create_table(
        self.f.root, 'dynamic_fields', field_desc, 'Dynamic fields')
    # Record the initial state of every scalar and field.
    self.snapshot(t=0.0, **scalars, **fields)
class Derivative(IsDescription):
    """Row description for one derivatives bhavcopy record."""

    INSTRUMENT = StringCol(6)
    SYMBOL = StringCol(50)
    EXPIRY_DT = StringCol(11)    # expiry date stored as text
    STRIKE_PR = Int32Col()       # strike price
    OPTION_TYP = StringCol(2)
    OPEN = Float32Col()
    HIGH = Float32Col()
    LOW = Float32Col()
    CLOSE = Float32Col()
    SETTLE_PR = Float32Col()     # settlement price
    CONTRACTS = Int32Col()
    VAL_INLAKH = Float64Col()
    OPEN_INT = Int32Col()        # open interest
    CHG_IN_OI = Int32Col()       # change in open interest
    TIMESTAMP = StringCol(11)    # stored as text, not a Time column
def init_static_scalars(self, scalars):
    """Create the 'static_scalars' table and persist the given values.

    Args:
        scalars (dict): scalar name -> value; no table is created when empty.
    """
    self.static_scalars = set(scalars)
    if not self.static_scalars:
        return
    # One Float64 column per scalar.
    cols_desc = {}
    for name in scalars:
        cols_desc[name] = Float64Col()
    self.tab_static_scalars = self.f.create_table(
        self.f.root, 'static_scalars', cols_desc, 'Static scalars')
    # One (empty) row so the values have somewhere to live.
    self.tab_static_scalars.row.append()
    self.tab_static_scalars.flush()
    # Store each provided value via __setitem__.
    for name, value in scalars.items():
        self[name] = value