class EEGTick(tables.IsDescription):
    state = tables.StringCol(16)
    packet = tables.UInt64Col()
    tick = tables.UInt8Col()
    tick_time = tables.UInt64Col()
    packets_skipped = tables.UInt8Col()
    battery = tables.UInt8Col()
    gyroX = tables.UInt16Col()
    gyroY = tables.UInt16Col()
    F3 = tables.Float32Col()
    FC5 = tables.Float32Col()
    AF3 = tables.Float32Col()
    F7 = tables.Float32Col()
    T7 = tables.Float32Col()
    P7 = tables.Float32Col()
    O1 = tables.Float32Col()
    O2 = tables.Float32Col()
    P8 = tables.Float32Col()
    T8 = tables.Float32Col()
    F8 = tables.Float32Col()
    AF4 = tables.Float32Col()
    FC6 = tables.Float32Col()
    F4 = tables.Float32Col()
    F3_QUAL = tables.Float32Col()
    FC5_QUAL = tables.Float32Col()
    AF3_QUAL = tables.Float32Col()
    F7_QUAL = tables.Float32Col()
    T7_QUAL = tables.Float32Col()
    P7_QUAL = tables.Float32Col()
    O1_QUAL = tables.Float32Col()
    O2_QUAL = tables.Float32Col()
    P8_QUAL = tables.Float32Col()
    T8_QUAL = tables.Float32Col()
    F8_QUAL = tables.Float32Col()
    AF4_QUAL = tables.Float32Col()
    FC6_QUAL = tables.Float32Col()
    F4_QUAL = tables.Float32Col()
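The description above is only a schema; a minimal usage sketch (file name and sample values are made up for illustration) that creates a table from it with the PyTables 3 API and appends one record:

import tables

with tables.open_file("eeg_session.h5", mode="w") as h5:   # illustrative file name
    ticks = h5.create_table("/", "eeg_ticks", EEGTick, "Raw EEG ticks")
    row = ticks.row
    row["state"] = b"recording"
    row["packet"] = 1
    row["tick"] = 0
    row["tick_time"] = 1234567890
    row["F3"] = 4200.0        # one EEG channel ...
    row["F3_QUAL"] = 1.0      # ... and its quality estimate
    row.append()              # unset columns keep their defaults
    ticks.flush()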
Example #2
    def saveWorld(self):
        '''TODO: check whether we are currently working on a world and, if so,
        save it; if not, ignore the command.'''
        self.updateWorld()
        alreadyTried = False
        if not self.fileLocation and not alreadyTried:
            alreadyTried = True
            self.saveWorldAs()
        else:
            h5Filter = tables.Filters(complevel=9,
                                      complib='zlib',
                                      shuffle=True,
                                      fletcher32=True)
            h5file = tables.openFile(self.fileLocation,
                                     mode='w',
                                     title="worldData",
                                     filters=h5Filter)

            # store our numpy datasets
            for k in self.world:
                if self.world[k] is not None:
                    atom = tables.Atom.from_dtype(self.world[k].dtype)
                    shape = self.world[k].shape
                    cArray = h5file.createCArray(h5file.root, k, atom, shape)
                    cArray[:] = self.world[k]

            # store our world settings
            pyDict = {
                'key': tables.StringCol(itemsize=40),
                'value': tables.UInt16Col(),
            }
            settingsTable = h5file.createTable('/', 'settings', pyDict)

            settings = dict(width=self.mapSize[0],
                            height=self.mapSize[1],
                            algorithm=self.algorithm,
                            roughness=self.roughness,
                            avgLandmass=self.avgLandmass,
                            avgElevation=self.avgElevation,
                            hasMountains=self.hasMountains,
                            hemisphere=self.hemisphere,
                            isIsland=self.isIsland,
                            seaLevel=self.seaLevel)

            settingsTable.append(list(settings.items()))
            settingsTable.cols.key.createIndex()  # create an index

            h5file.close()
            del h5file, h5Filter
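Note that this snippet uses the older PyTables 2.x camelCase API (openFile, createCArray, createTable, createIndex); in PyTables 3 these are open_file, create_carray, create_table and Column.create_index. A hedged read-back sketch for the settings table, using the index created on the key column (the file path is an assumption):

import tables

with tables.open_file("world.h5", mode="r") as h5:   # illustrative path
    settings = h5.root.settings
    # the index on 'key' speeds up lookups like this one
    rows = settings.read_where("key == target", condvars={"target": b"width"})
    width = int(rows["value"][0]) if len(rows) else None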
def create_event_tel_waveform(hfile,
                              tel_node,
                              nb_gain,
                              image_shape,
                              telId,
                              chunkshape=1):
    """
	Create the waveform tables into the given telescope node
	Parameters:
		hfile : HDF5 file to be used
		tel_node : telescope to be completed
		nb_gain : number of gains of the camera
		image_shape : shape of the camera images (number of slices, number of pixels)
		telId : id of the telescope
		chunkshape : shape of the chunk to be used to store the data
	"""
    if nb_gain > 1:
        columns_dict_waveform = {
            'event_id': tables.UInt64Col(),
            "waveformHi": tables.UInt16Col(shape=image_shape),
            "waveformLo": tables.UInt16Col(shape=image_shape)
        }
    else:
        columns_dict_waveform = {
            'event_id': tables.UInt64Col(),
            "waveformHi": tables.UInt16Col(shape=image_shape)
        }

    description_waveform = type('description columns_dict_waveform',
                                (tables.IsDescription, ),
                                columns_dict_waveform)
    hfile.create_table(tel_node,
                       'tel_{0:0=3d}'.format(telId),
                       description_waveform,
                       "Table of waveform of the high gain signal",
                       chunkshape=chunkshape)
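A hypothetical call site for this helper; the file name, group name, image shape and telescope id are all assumptions made for illustration:

import tables

hfile = tables.open_file("r1.h5", mode="w")
tel_node = hfile.create_group("/", "r1", "Telescope waveform data")
create_event_tel_waveform(hfile,
                          tel_node,
                          nb_gain=2,
                          image_shape=(40, 1855),   # (nb slices, nb pixels), illustrative
                          telId=1,
                          chunkshape=10)
hfile.close()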
Example #4
class ClusterHitInfoTable(tb.IsDescription):
    event_number = tb.Int64Col(pos=0)
    frame = tb.UInt8Col(pos=1)
    column = tb.UInt16Col(pos=2)
    row = tb.UInt16Col(pos=3)
    charge = tb.UInt16Col(pos=4)
    cluster_id = tb.UInt16Col(pos=5)
    is_seed = tb.UInt8Col(pos=6)
    cluster_size = tb.UInt16Col(pos=7)
    n_cluster = tb.UInt16Col(pos=8)
class ClusterHitInfoTable(tb.IsDescription):
    event_number = tb.Int64Col(pos=0)
    trigger_number = tb.UInt32Col(pos=1)
    relative_BCID = tb.UInt8Col(pos=2)
    LVL1ID = tb.UInt16Col(pos=3)
    column = tb.UInt8Col(pos=4)
    row = tb.UInt16Col(pos=5)
    tot = tb.UInt8Col(pos=6)
    BCID = tb.UInt16Col(pos=7)
    TDC = tb.UInt16Col(pos=8)
    TDC_time_stamp = tb.UInt8Col(pos=9)
    trigger_status = tb.UInt8Col(pos=10)
    service_record = tb.UInt32Col(pos=11)
    event_status = tb.UInt16Col(pos=12)
    cluster_id = tb.UInt16Col(pos=13)
    is_seed = tb.UInt8Col(pos=14)
    cluster_size = tb.UInt16Col(pos=15)
    n_cluster = tb.UInt16Col(pos=16)
def create_sorted_waveform_table_shape(hfile, cam_tel_group, nameWaveformHi, dataEntryShape, chunkshape=1):
	"""
	Create the table to store the signal
	Parameters:
		hfile : HDF5 file to be used
		cam_tel_group : telescope group in which to put the tables
		nameWaveformHi : name of the table to store the waveform
		dataEntryShape : shape of the entries to be stored
		chunkshape : shape of the chunk to be used to store the data of waveform and minimum
	Return:
		the created table
	"""
	columns_dict_waveformHi  = {nameWaveformHi: tables.UInt16Col(shape=dataEntryShape)}
	description_waveformHi = type('description columns_dict_waveformHi', (tables.IsDescription,),
								  columns_dict_waveformHi)
	return hfile.create_table(cam_tel_group, nameWaveformHi, description_waveformHi, "Table of waveform of the signal",
							  chunkshape=chunkshape)
Example #7
class Info2D(PT.IsDescription):
    camn = PT.UInt16Col(pos=0)
    frame = PT.Int64Col(pos=1)
    timestamp = PT.FloatCol(
        pos=2
    )  # when the image trigger happened (returned by timestamp modeler on MainBrain)
    cam_received_timestamp = PT.FloatCol(
        pos=3
    )  # when the image was acquired by flydra software (on camera computer)
    x = PT.Float32Col(pos=4)
    y = PT.Float32Col(pos=5)
    area = PT.Float32Col(pos=6)
    slope = PT.Float32Col(pos=7)
    eccentricity = PT.Float32Col(pos=8)
    frame_pt_idx = PT.UInt8Col(
        pos=9)  # index of point if there were > 1 points in frame
    cur_val = PT.UInt8Col(pos=10)
    mean_val = PT.Float32Col(pos=11)
    sumsqf_val = PT.Float32Col(pos=12)  # estimate of <x^2> (running_sumsqf)
class Particle(tb.IsDescription):
    # 16-character String
    name = tb.StringCol(16)
    # signed 64-bit integer
    idnumber = tb.Int64Col()
    # unsigned short integer
    ADCcount = tb.UInt16Col()
    # unsigned byte
    TDCcount = tb.UInt8Col()
    # integer
    grid_i = tb.Int32Col()
    # integer
    grid_j = tb.Int32Col()

    # A sub-structure (nested data-type)
    class Properties(tb.IsDescription):
        # 2-D float array (single-precision)
        pressure = tb.Float32Col(shape=(2, 3))
        # 3-D float array (double-precision)
        energy = tb.Float64Col(shape=(2, 3, 4))
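A sketch of filling one row of the nested description above; nested columns are addressed with a '/'-separated path, and the file name and values are made up:

import numpy as np
import tables as tb

with tb.open_file("particles.h5", mode="w") as h5:     # illustrative file name
    table = h5.create_table("/", "particles", Particle, "Particle readout")
    row = table.row
    row["name"] = b"particle_0"
    row["idnumber"] = 0
    row["ADCcount"] = 256
    row["TDCcount"] = 1
    row["grid_i"] = 10
    row["grid_j"] = 20
    row["Properties/pressure"] = np.zeros((2, 3), dtype=np.float32)
    row["Properties/energy"] = np.ones((2, 3, 4), dtype=np.float64)
    row.append()
    table.flush()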
Example #9
class ProteinTable(tables.IsDescription):
    EntryNr = tables.UInt32Col(pos=1)
    SeqBufferOffset = tables.UInt64Col(pos=2)
    SeqBufferLength = tables.UInt32Col(pos=3)
    OmaGroup = tables.UInt32Col(pos=4, dflt=0)
    OmaHOG = tables.StringCol(255, pos=5, dflt=b"")
    Chromosome = tables.StringCol(255, pos=6)
    LocusStart = tables.UInt32Col(pos=7)
    LocusEnd = tables.UInt32Col(pos=8)
    LocusStrand = tables.Int8Col(pos=9, dflt=1)
    AltSpliceVariant = tables.Int32Col(pos=10, dflt=0)
    CanonicalId = tables.StringCol(20, pos=11, dflt=b"")
    CDNABufferOffset = tables.UInt64Col(pos=12)
    CDNABufferLength = tables.UInt32Col(pos=13)
    MD5ProteinHash = tables.StringCol(32, pos=14)
    DescriptionOffset = tables.UInt32Col(pos=15)
    DescriptionLength = tables.UInt16Col(pos=16)
    SubGenome = tables.StringCol(1, pos=17, dflt=b"")
    RootHogUpstream = tables.Int32Col(pos=18, dflt=-1)
    RootHogDownStream = tables.Int32Col(pos=19, dflt=-1)
def create_sorted_waveform_table(hfile, cam_tel_group, nameWaveformHi, nbSlice, nbPixel, isStoreSlicePixel,
								 chunkshape=1):
	"""
	Create the table to store the signal
	Parameters:
		hfile : HDF5 file to be used
		cam_tel_group : telescope group in which to put the tables
		nameWaveformHi : name of the table to store the waveform
		nbSlice : number of slices of the signal
		nbPixel : number of pixels of the camera
		isStoreSlicePixel : true to store data per slice and pixel, false for pixel and slice
		chunkshape : shape of the chunk to be used to store the data of waveform and minimum
	"""
	image_shape = (nbPixel, nbSlice)
	if isStoreSlicePixel:
		image_shape = (nbSlice, nbPixel)
	columns_dict_waveformHi  = {nameWaveformHi: tables.UInt16Col(shape=image_shape)}
	description_waveformHi = type('description columns_dict_waveformHi', (tables.IsDescription,),
								  columns_dict_waveformHi)
	hfile.create_table(cam_tel_group, nameWaveformHi, description_waveformHi, "Table of waveform of the signal",
					   chunkshape=chunkshape)
Example #11
    def get_table_def(self):
        """
        Returns a dict of column definitions using multidimensional
        hdf5 columns. Columns for parameters and likelihoods are atomic and resemble
        the csv datawriter. If ``save_sim=True``, the simulation array is saved as an array value in
        a single multidimensional table cell

        cf.: https://www.pytables.org/usersguide/tutorials.html#multidimensional-table-cells-and-automatic-sanity-checks

        """
        # Position of likelihood columns
        like_pos = 0
        # Start position of parameter columns
        param_pos = np.array(self.like).size
        # Start position of simulation columns
        sim_pos = param_pos + np.array(self.randompar).size
        chain_pos = sim_pos

        dtype = np.dtype(self.db_precision)
        columns = {
            self.header[i]: tables.Col.from_dtype(dtype, pos=i)
            for i in range(like_pos, sim_pos)
        }

        if self.save_sim:
            # Get the shape of the simulation
            sim_shape = np.array(self.simulations).shape
            # Get the appropriate dtype for the n-d cell
            # (tables.Col.from_dtype does not take a shape parameter)
            sim_dtype = np.dtype((self.db_precision, sim_shape))
            columns['simulation'] = tables.Col.from_dtype(
                sim_dtype, pos=sim_pos
            )
            chain_pos += 1
        # Add a column chains
        columns['chains'] = tables.UInt16Col(pos=chain_pos)

        return columns
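The np.dtype((precision, shape)) wrapper is the key trick here: tables.Col.from_dtype takes its shape from the dtype rather than from a separate argument, and the plain dict returned by get_table_def is itself a valid table description. A small standalone sketch (names and shapes are illustrative):

import numpy as np
import tables

precision = np.float32
sim_shape = (5, 3)

columns = {
    "like": tables.Col.from_dtype(np.dtype(precision), pos=0),
    "simulation": tables.Col.from_dtype(np.dtype((precision, sim_shape)), pos=1),
    "chains": tables.UInt16Col(pos=2),
}

with tables.open_file("results.h5", mode="w") as h5:
    h5.create_table("/", "results", columns, "Sampler output")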
Example #12
class ConfTable(tb.IsDescription):
    configuration = tb.StringCol(64)
    value = tb.UInt16Col()
Example #13
class DacTable(tb.IsDescription):
    DAC = tb.StringCol(64)
    value = tb.UInt16Col()
Example #14
class HisparcConfiguration(tables.IsDescription):
    event_id = tables.UInt32Col()
    timestamp = tables.Time32Col()
    gps_latitude = tables.Float64Col()
    gps_longitude = tables.Float64Col()
    gps_altitude = tables.Float64Col()
    mas_version = tables.Int32Col(dflt=-1)
    slv_version = tables.Int32Col(dflt=-1)
    trig_low_signals = tables.UInt32Col()
    trig_high_signals = tables.UInt32Col()
    trig_external = tables.UInt32Col()
    trig_and_or = tables.BoolCol()
    precoinctime = tables.Float64Col()
    coinctime = tables.Float64Col()
    postcoinctime = tables.Float64Col()
    detnum = tables.UInt16Col()
    password = tables.Int32Col(dflt=-1)
    spare_bytes = tables.UInt8Col()
    use_filter = tables.BoolCol()
    use_filter_threshold = tables.BoolCol()
    reduce_data = tables.BoolCol()
    buffer = tables.Int32Col(dflt=-1)
    startmode = tables.BoolCol()
    delay_screen = tables.Float64Col()
    delay_check = tables.Float64Col()
    delay_error = tables.Float64Col()
    mas_ch1_thres_low = tables.Float64Col()
    mas_ch1_thres_high = tables.Float64Col()
    mas_ch2_thres_low = tables.Float64Col()
    mas_ch2_thres_high = tables.Float64Col()
    mas_ch1_inttime = tables.Float64Col()
    mas_ch2_inttime = tables.Float64Col()
    mas_ch1_voltage = tables.Float64Col()
    mas_ch2_voltage = tables.Float64Col()
    mas_ch1_current = tables.Float64Col()
    mas_ch2_current = tables.Float64Col()
    mas_comp_thres_low = tables.Float64Col()
    mas_comp_thres_high = tables.Float64Col()
    mas_max_voltage = tables.Float64Col()
    mas_reset = tables.BoolCol()
    mas_ch1_gain_pos = tables.UInt8Col()
    mas_ch1_gain_neg = tables.UInt8Col()
    mas_ch2_gain_pos = tables.UInt8Col()
    mas_ch2_gain_neg = tables.UInt8Col()
    mas_ch1_offset_pos = tables.UInt8Col()
    mas_ch1_offset_neg = tables.UInt8Col()
    mas_ch2_offset_pos = tables.UInt8Col()
    mas_ch2_offset_neg = tables.UInt8Col()
    mas_common_offset = tables.UInt8Col()
    mas_internal_voltage = tables.UInt8Col()
    mas_ch1_adc_gain = tables.Float64Col()
    mas_ch1_adc_offset = tables.Float64Col()
    mas_ch2_adc_gain = tables.Float64Col()
    mas_ch2_adc_offset = tables.Float64Col()
    mas_ch1_comp_gain = tables.Float64Col()
    mas_ch1_comp_offset = tables.Float64Col()
    mas_ch2_comp_gain = tables.Float64Col()
    mas_ch2_comp_offset = tables.Float64Col()
    slv_ch1_thres_low = tables.Float64Col()
    slv_ch1_thres_high = tables.Float64Col()
    slv_ch2_thres_low = tables.Float64Col()
    slv_ch2_thres_high = tables.Float64Col()
    slv_ch1_inttime = tables.Float64Col()
    slv_ch2_inttime = tables.Float64Col()
    slv_ch1_voltage = tables.Float64Col()
    slv_ch2_voltage = tables.Float64Col()
    slv_ch1_current = tables.Float64Col()
    slv_ch2_current = tables.Float64Col()
    slv_comp_thres_low = tables.Float64Col()
    slv_comp_thres_high = tables.Float64Col()
    slv_max_voltage = tables.Float64Col()
    slv_reset = tables.BoolCol()
    slv_ch1_gain_pos = tables.UInt8Col()
    slv_ch1_gain_neg = tables.UInt8Col()
    slv_ch2_gain_pos = tables.UInt8Col()
    slv_ch2_gain_neg = tables.UInt8Col()
    slv_ch1_offset_pos = tables.UInt8Col()
    slv_ch1_offset_neg = tables.UInt8Col()
    slv_ch2_offset_pos = tables.UInt8Col()
    slv_ch2_offset_neg = tables.UInt8Col()
    slv_common_offset = tables.UInt8Col()
    slv_internal_voltage = tables.UInt8Col()
    slv_ch1_adc_gain = tables.Float64Col()
    slv_ch1_adc_offset = tables.Float64Col()
    slv_ch2_adc_gain = tables.Float64Col()
    slv_ch2_adc_offset = tables.Float64Col()
    slv_ch1_comp_gain = tables.Float64Col()
    slv_ch1_comp_offset = tables.Float64Col()
    slv_ch2_comp_gain = tables.Float64Col()
    slv_ch2_comp_offset = tables.Float64Col()
Example #15
class Hit(tb.IsDescription):
    channelID = tb.UInt16Col(pos=0)
    hit_data = tb.UInt16Col(pos=1)
Example #16
from .tools.radians import (radian, xy_to_deg_vec, xy_to_rad_vec,
    get_angle_array, get_angle_histogram, circle_diff, circle_diff_vec_deg,
    unwrap_deg)

# Constants
DEBUG = Config['debug_mode']
INVALID = Config['track']['invalid_sample']
BOXCAR_KERNEL = Config['track']['boxcar_kernel']
INNER_DIAMETER = Config['track']['inner_diameter']
OUTER_DIAMETER = Config['track']['outer_diameter']
TRACK_RADIUS = (INNER_DIAMETER + OUTER_DIAMETER) / 4
TRACK_WIDTH = (OUTER_DIAMETER - INNER_DIAMETER) / 2

# Spatial information data table
CellInfoDescr = {       'id'                : tb.UInt64Col(pos=1),
                        'rat'               : tb.UInt16Col(pos=2),
                        'day'               : tb.UInt8Col(pos=3),
                        'session'           : tb.UInt8Col(pos=4),
                        'tc'                : tb.StringCol(itemsize=8, pos=5),
                        'area'              : tb.StringCol(itemsize=16, pos=6),
                        'quality'           : tb.StringCol(itemsize=16, pos=7),
                        'spike_width'       : tb.FloatCol(pos=8),
                        'N_running'         : tb.UInt16Col(pos=9),
                        'I'                 : tb.FloatCol(pos=10),
                        'p_value'           : tb.FloatCol(pos=11)   }


class TrajectoryData(HasTraits):

    """
    Smart container for the trajectory tracking data of a recording session.
    """

class IceBridgeFlightLineDescriptor(T.IsDescription):
    Flightline_ID = T.UInt32Col(pos=0)
    name = T.StringCol(itemsize=20, pos=1)
    year = T.UInt16Col(pos=2)
    month = T.UInt8Col(pos=3)
    day = T.UInt8Col(pos=4)
Example #18
class OmaGroupTable(tables.IsDescription):
    GroupNr = tables.UInt32Col(pos=0)
    Fingerprint = tables.StringCol(7, pos=1)
    KeywordOffset = tables.UInt32Col(pos=2)
    KeywordLength = tables.UInt16Col(pos=3)
    NrMembers = tables.UInt16Col(pos=4)
Example #19
from scanr.spike import TetrodeSelect, find_theta_tetrode
from scanr.data import get_node
from scanr.time import time_slice, select_from
from scanr.meta import get_maze_list
from scanr.eeg import get_eeg_timeseries, Ripple, Theta

# Local imports
from .core.analysis import AbstractAnalysis
from .core.report import BaseReport
from scanr.tools.misc import Reify
from scanr.tools.plot import quicktitle, grouped_bar_plot
from scanr.tools.stats import t_welch, t_one_tailed

# Table descriptions
BehDescr = {
    'id': tb.UInt16Col(pos=1),
    'rat': tb.UInt16Col(pos=2),
    'theta_avg': tb.Float32Col(pos=3),
    'theta_max': tb.Float32Col(pos=4),
    'ripples': tb.UInt16Col(pos=5)
}

RippleDescr = {
    'rat': tb.UInt16Col(pos=1),
    'theta': tb.Float32Col(pos=2),
    'running': tb.BoolCol(pos=3),
    'pause': tb.BoolCol(pos=4),
    'scan': tb.BoolCol(pos=5)
}

Example #20
class HisparcConfiguration(tables.IsDescription):
    event_id = tables.UInt32Col(pos=0)
    timestamp = tables.Time32Col(pos=1)
    gps_latitude = tables.Float64Col(pos=2)
    gps_longitude = tables.Float64Col(pos=3)
    gps_altitude = tables.Float64Col(pos=4)
    mas_version = tables.Int32Col(dflt=-1, pos=5)
    slv_version = tables.Int32Col(dflt=-1, pos=6)
    trig_low_signals = tables.UInt32Col(pos=7)
    trig_high_signals = tables.UInt32Col(pos=8)
    trig_external = tables.UInt32Col(pos=9)
    trig_and_or = tables.BoolCol(pos=10)
    precoinctime = tables.Float64Col(pos=11)
    coinctime = tables.Float64Col(pos=12)
    postcoinctime = tables.Float64Col(pos=13)
    detnum = tables.UInt16Col(pos=14)
    password = tables.Int32Col(dflt=-1, pos=15)
    spare_bytes = tables.UInt8Col(pos=16)
    use_filter = tables.BoolCol(pos=17)
    use_filter_threshold = tables.BoolCol(pos=18)
    reduce_data = tables.BoolCol(pos=19)
    buffer = tables.Int32Col(dflt=-1, pos=20)
    startmode = tables.BoolCol(pos=21)
    delay_screen = tables.Float64Col(pos=22)
    delay_check = tables.Float64Col(pos=23)
    delay_error = tables.Float64Col(pos=24)
    mas_ch1_thres_low = tables.Float64Col(pos=25)
    mas_ch1_thres_high = tables.Float64Col(pos=26)
    mas_ch2_thres_low = tables.Float64Col(pos=27)
    mas_ch2_thres_high = tables.Float64Col(pos=28)
    mas_ch1_inttime = tables.Float64Col(pos=29)
    mas_ch2_inttime = tables.Float64Col(pos=30)
    mas_ch1_voltage = tables.Float64Col(pos=31)
    mas_ch2_voltage = tables.Float64Col(pos=32)
    mas_ch1_current = tables.Float64Col(pos=33)
    mas_ch2_current = tables.Float64Col(pos=34)
    mas_comp_thres_low = tables.Float64Col(pos=35)
    mas_comp_thres_high = tables.Float64Col(pos=36)
    mas_max_voltage = tables.Float64Col(pos=37)
    mas_reset = tables.BoolCol(pos=38)
    mas_ch1_gain_pos = tables.UInt8Col(pos=39)
    mas_ch1_gain_neg = tables.UInt8Col(pos=40)
    mas_ch2_gain_pos = tables.UInt8Col(pos=41)
    mas_ch2_gain_neg = tables.UInt8Col(pos=42)
    mas_ch1_offset_pos = tables.UInt8Col(pos=43)
    mas_ch1_offset_neg = tables.UInt8Col(pos=44)
    mas_ch2_offset_pos = tables.UInt8Col(pos=45)
    mas_ch2_offset_neg = tables.UInt8Col(pos=46)
    mas_common_offset = tables.UInt8Col(pos=47)
    mas_internal_voltage = tables.UInt8Col(pos=48)
    mas_ch1_adc_gain = tables.Float64Col(pos=49)
    mas_ch1_adc_offset = tables.Float64Col(pos=50)
    mas_ch2_adc_gain = tables.Float64Col(pos=51)
    mas_ch2_adc_offset = tables.Float64Col(pos=52)
    mas_ch1_comp_gain = tables.Float64Col(pos=53)
    mas_ch1_comp_offset = tables.Float64Col(pos=54)
    mas_ch2_comp_gain = tables.Float64Col(pos=55)
    mas_ch2_comp_offset = tables.Float64Col(pos=56)
    slv_ch1_thres_low = tables.Float64Col(pos=57)
    slv_ch1_thres_high = tables.Float64Col(pos=58)
    slv_ch2_thres_low = tables.Float64Col(pos=59)
    slv_ch2_thres_high = tables.Float64Col(pos=60)
    slv_ch1_inttime = tables.Float64Col(pos=61)
    slv_ch2_inttime = tables.Float64Col(pos=62)
    slv_ch1_voltage = tables.Float64Col(pos=63)
    slv_ch2_voltage = tables.Float64Col(pos=64)
    slv_ch1_current = tables.Float64Col(pos=65)
    slv_ch2_current = tables.Float64Col(pos=66)
    slv_comp_thres_low = tables.Float64Col(pos=67)
    slv_comp_thres_high = tables.Float64Col(pos=68)
    slv_max_voltage = tables.Float64Col(pos=69)
    slv_reset = tables.BoolCol(pos=70)
    slv_ch1_gain_pos = tables.UInt8Col(pos=71)
    slv_ch1_gain_neg = tables.UInt8Col(pos=72)
    slv_ch2_gain_pos = tables.UInt8Col(pos=73)
    slv_ch2_gain_neg = tables.UInt8Col(pos=74)
    slv_ch1_offset_pos = tables.UInt8Col(pos=75)
    slv_ch1_offset_neg = tables.UInt8Col(pos=76)
    slv_ch2_offset_pos = tables.UInt8Col(pos=77)
    slv_ch2_offset_neg = tables.UInt8Col(pos=78)
    slv_common_offset = tables.UInt8Col(pos=79)
    slv_internal_voltage = tables.UInt8Col(pos=80)
    slv_ch1_adc_gain = tables.Float64Col(pos=81)
    slv_ch1_adc_offset = tables.Float64Col(pos=82)
    slv_ch2_adc_gain = tables.Float64Col(pos=83)
    slv_ch2_adc_offset = tables.Float64Col(pos=84)
    slv_ch1_comp_gain = tables.Float64Col(pos=85)
    slv_ch1_comp_offset = tables.Float64Col(pos=86)
    slv_ch2_comp_gain = tables.Float64Col(pos=87)
    slv_ch2_comp_offset = tables.Float64Col(pos=88)
Example #21
    def collect_data(self, area='CA1'):
        SessionDescr = {
            'id': tb.UInt16Col(pos=1),
            'rat': tb.UInt16Col(pos=2),
            'day': tb.UInt16Col(pos=3),
            'session': tb.UInt16Col(pos=4),
            'start': tb.UInt64Col(pos=5),
            'type': tb.StringCol(itemsize=4, pos=6),
            't_theta': tb.StringCol(itemsize=16, pos=7),
            'P_theta': tb.StringCol(itemsize=16, pos=8),
            'f_theta': tb.StringCol(itemsize=16, pos=9),
            'speed': tb.StringCol(itemsize=16, pos=10),
            'radial_velocity': tb.StringCol(itemsize=16, pos=11),
            'hd_velocity': tb.StringCol(itemsize=16, pos=12)
        }

        def get_area_query(area):
            if area == "CAX":
                return '(area=="CA1")|(area=="CA3")'
            return 'area=="%s"' % area

        tetrode_query = '(%s)&(EEG==True)' % get_area_query(area)
        self.out('Using tetrode query: %s' % tetrode_query)

        self.results['scan_points'] = ('start', 'max', 'return', 'end')
        dataset_list = TetrodeSelect.datasets(tetrode_query)

        def get_dataset_sessions():
            sessions = []
            for dataset in dataset_list:
                for maze in get_maze_list(*dataset):
                    sessions.append(dataset + (maze, ))
            return sessions

        session_list = get_dataset_sessions()
        self.results['rats'] = rat_list = sorted(
            list(set(map(lambda d: d[0], dataset_list))))
        self.results['N_rats'] = len(rat_list)

        data_file = self.open_data_file()
        array_group = data_file.createGroup('/',
                                            'arrays',
                                            title='Scan and Signal Arrays')
        session_table = data_file.createTable(
            '/', 'sessions', SessionDescr,
            'Sessions for Scan Cross-Correlation Analysis')

        id_fmt = 'data_%06d'
        array_id = 0
        session_id = 0
        row = session_table.row
        remove = []

        for rds in session_list:

            rds_str = 'rat%d-%02d-m%d' % rds
            data = SessionData.get(rds)
            theta_tt = find_theta_tetrode(rds[:2], condn=tetrode_query)
            if theta_tt is None:
                remove.append(rds)
                continue
            theta_tt = theta_tt[0]

            row['id'] = session_id
            row['rat'], row['day'], row['session'] = rds
            row['type'] = (data.attrs['type']
                           in ('STD', 'MIS')) and 'DR' or 'NOV'
            row['start'] = data.start

            EEG = get_eeg_timeseries(rds, theta_tt)
            if EEG is None:
                remove.append(rds)
                continue

            ts_theta, x_theta = Theta.timeseries(*EEG)
            t_theta = data.T_(ts_theta)
            P_theta = zscore(Theta.power(x_theta, filtered=True))
            f_theta = Theta.frequency(x_theta, filtered=True)

            speed = data.F_('speed')(t_theta)
            radial_velocity = np.abs(data.F_('radial_velocity')(t_theta))
            hd_velocity = np.abs(data.F_('hd_velocity')(t_theta))

            session_signals = [('t_theta', t_theta), ('P_theta', P_theta),
                               ('f_theta', f_theta), ('speed', speed),
                               ('radial_velocity', radial_velocity),
                               ('hd_velocity', hd_velocity)]

            for k, d in session_signals:
                data_file.createArray(array_group,
                                      id_fmt % array_id,
                                      d,
                                      title='%s : %s' % (rds_str, k))
                row[k] = id_fmt % array_id
                array_id += 1

            self.out('Saved data from %s.' % rds_str)

            row.append()
            if array_id % 10 == 0:
                session_table.flush()
            session_id += 1

        for rds in remove:
            session_list.remove(rds)

        self.results['sessions'] = session_list
        self.results['N_sessions'] = len(session_list)
        self.results['signals'] = ('P_theta', 'f_theta', 'speed',
                                   'radial_velocity', 'hd_velocity')

        session_table.flush()
        self.close_data_file()
        self.out('All done!')
Example #22
    def collect_data(self):
        """Create a data structure with theta power/frequency samples with
        corresponding instantaneous velocity measurements such as path
        speed, head direction velocity, and radial velocity
        """
        velocity_moments = ('speed', 'radial_velocity', 'hd_velocity')
        self.results['velocity_moments'] = velocity_moments

        tetrode_query = '(area=="CA1")&(EEG==True)'
        dataset_list = TetrodeSelect.datasets(tetrode_query,
                                              allow_ambiguous=True)

        samples = AutoVivification()

        def initialize_rat_samples(rat):
            for v_name in velocity_moments:
                samples[rat][v_name] = np.array([], float)
            samples[rat]['power'] = np.array([], float)
            samples[rat]['frequency'] = np.array([], float)

        def add_velocity_samples(rat, session, t):
            for moment in velocity_moments:
                add_data_sample(rat, moment, session.F_(moment)(t))

        def add_data_sample(rat, key, data):
            samples[rat][key] = np.r_[samples[rat][key], data]

        for rat, day in dataset_list:
            theta_tt, base_theta = find_theta_tetrode((rat, day),
                                                      condn=tetrode_query,
                                                      ambiguous=True)

            if rat not in samples:
                initialize_rat_samples(rat)

            for maze in get_maze_list(rat, day):
                rds = rat, day, maze
                self.out('Session rat%03d-%02d-m%d: tetrode Sc%02d' %
                         (rds + (theta_tt, )))

                session = SessionData.get(rds, load_clusters=False)

                EEG = get_eeg_timeseries(rds, theta_tt)
                if EEG is None:
                    continue

                ts, x = EEG
                ts_theta, x_theta = Theta.timeseries(ts, x)

                P_theta = zscore(Theta.power(x_theta, filtered=True))
                f_theta = Theta.frequency(x_theta, filtered=True)

                ix_scanning = select_from(ts_theta, session.scan_list)
                t_theta_scanning = session.T_(ts_theta[ix_scanning])

                add_velocity_samples(rat, session, t_theta_scanning)
                add_data_sample(rat, 'power', P_theta[ix_scanning])
                add_data_sample(rat, 'frequency', f_theta[ix_scanning])

        rat_list = sorted(list(set(samples.keys())))
        self.out('Finished collecting data for %d rats.' % len(rat_list))

        sample_description = {k: tb.FloatCol() for k in velocity_moments}
        sample_description.update(rat=tb.UInt16Col(),
                                  power=tb.FloatCol(),
                                  frequency=tb.FloatCol())

        data_file = self.open_data_file()
        results_table = data_file.createTable(
            '/',
            'theta_velocity',
            sample_description,
            title='Theta and Velocity Data Across Rats')
        row = results_table.row

        self.out('Generating results table...')

        c = 0
        for rat in rat_list:
            N = samples[rat]['power'].size
            self.out('Adding rat %d, with %d samples.' % (rat, N))

            assert len(set(samples[rat][k].size
                           for k in samples[rat].keys())) == 1

            for i in xrange(N):
                row['rat'] = rat
                row['power'] = samples[rat]['power'][i]
                row['frequency'] = samples[rat]['frequency'][i]
                for moment in velocity_moments:
                    row[moment] = samples[rat][moment][i]
                row.append()

                if c % 100 == 0:
                    results_table.flush()
                if c % 500 == 0:
                    self.out.printf('.')
                c += 1

            self.out.printf('\n')
        self.out('Done!')

        self.close_data_file()
Example #23
# Local imports
from .core.analysis import AbstractAnalysis
from scanr.tools.plot import quicktitle, shaded_error
from scanr.tools.filters import quick_boxcar

# Constants
THETA_POWER_SMOOTHING = 0.050 # 50 ms
THETA_FREQ_SMOOTHING = 0.300 # 300 ms

# Helper functions
F = lambda x, y: interp1d(x, y, fill_value=0.0, bounds_error=False)
Z = lambda x: (x - np.median(x)) / np.std(x)
norm = lambda x: x / np.trapz(x)

# Table descriptions
SessionDescr =  {   'id'        :   tb.UInt16Col(pos=1),
                    'rat'       :   tb.UInt16Col(pos=2),
                    'day'       :   tb.UInt16Col(pos=3),
                    'session'   :   tb.UInt16Col(pos=4),
                    'type'      :   tb.StringCol(itemsize=4, pos=5),
                    't_theta'   :   tb.StringCol(itemsize=16, pos=6),
                    'ZP_theta'  :   tb.StringCol(itemsize=16, pos=7),
                    'f_theta'   :   tb.StringCol(itemsize=16, pos=8),
                    'scans'     :   tb.StringCol(itemsize=16, pos=9),
                    'pauses'    :   tb.StringCol(itemsize=16, pos=10),
                    'ripples'   :   tb.StringCol(itemsize=16, pos=11)    }


class ScanRipples(AbstractAnalysis):

    """
Example #24
class Destination(tables.IsDescription):
    geoid = tables.StringCol(15, pos=1)
    index = tables.UInt16Col(pos=2)
Example #25
class TableConfig(tb.IsDescription):
    '''Configuration class for the tables created by pytables when saving'''
    image = tb.UInt16Col(shape=(imageY, imageX + numOverscan))
Example #26
class HitInfoTable(tb.IsDescription):
    event_number = tb.Int64Col(pos=0)
    frame = tb.UInt8Col(pos=1)
    column = tb.UInt16Col(pos=2)
    row = tb.UInt16Col(pos=3)
    charge = tb.UInt16Col(pos=4)
Example #27
    Ilias Bilionis
"""


import tables as tb
import numpy as np


if __name__ == '__main__':
    num_params = 10
    params = np.random.randn(num_params)
    filters = tb.Filters(complevel=9)
    fd = tb.open_file('test_db.h5', mode='a', filters=filters)
    fd.create_group('/', 'mcmc', 'Metropolis-Hastings Algorithm')
    # Data type for a single record in the chain
    single_record_dtype = {'step': tb.UInt16Col(),
                           'params': tb.Float32Col(shape=(num_params,)),
                           'proposal': tb.UInt16Col(),
                           'log_like': tb.Float32Col(),
                           'log_prior': tb.Float32Col(),
                           'grad_log_like': tb.Float32Col(shape=(num_params,)),
                           'grad_log_prior': tb.Float32Col(shape=(num_params,)),
                           'accepted': tb.UInt16Col()}
    table = fd.create_table('/mcmc', 'chain_000', single_record_dtype, 'Chain: 0')
    chain = table.row
    for i in xrange(1000):
        print i
        chain['step'] = i
        chain['params'] = np.random.randn(num_params)
        chain['proposal'] = 0
        chain['log_like'] = np.random.rand()
Example #28
from .core.analysis import AbstractAnalysis
from scanr.tools.radians import xy_to_deg_vec
from scanr.tools.stats import integer_hist, SampleDistribution
from scanr.tools.plot import AxesList, textlabel, quicktitle

# Constants
RATEMAP_BINS = 48
MIN_SCAN_MAGNITUDE = 10
MIN_FIELD_RATE = 1.5
MIN_FIELD_SIZE = 15
MIN_TRAVERSAL_SIZE = 15
MIN_TRAVERSAL_SPIKES = 3

# Table descriptions
ScanDescr = {
    'rat': tb.UInt16Col(pos=1),
    'day': tb.UInt16Col(pos=2),
    'session': tb.UInt16Col(pos=3),
    'tc': tb.StringCol(itemsize=8, pos=4),
    'scan': tb.UInt16Col(pos=5),
    'field_distance': tb.FloatCol(pos=6),
    'traversal_distance': tb.FloatCol(pos=7),
    'strength': tb.FloatCol(pos=8),
    'field_size': tb.FloatCol(pos=9),
    'traversal_size': tb.FloatCol(pos=10),
    'out_spikes': tb.UInt16Col(pos=11),
    'in_spikes': tb.UInt16Col(pos=12),
    'spikes': tb.UInt16Col(pos=13)
}

Example #29
from .time import elapsed, exclude_from
from .data import get_kdata_file, flush_file, get_group, new_table, get_node
from .tools.bash import CPrint
from .tools.misc import (contiguous_groups, merge_adjacent_groups,
                         unique_pairs, DataSpreadsheet)
from .tools.radians import xy_to_deg_vec, circle_diff_vec_deg, shortcut_deg
from .tools.filters import filtfilt, find_minima, quick_boxcar
from .tools.stats import IQR

# Constants
DEBUG = Config['debug_mode']
CfgScan = Config['scanning']
CfgPause = Config['pauses']

ScanDescr = {
    'id': tb.UInt16Col(pos=1),
    'rat': tb.UInt16Col(pos=2),
    'day': tb.UInt16Col(pos=3),
    'session': tb.UInt16Col(pos=4),
    'type': tb.StringCol(itemsize=16, pos=5),
    'prefix': tb.UInt64Col(pos=6),
    'prepause': tb.UInt64Col(pos=7),
    'start': tb.UInt64Col(pos=8),
    'max': tb.UInt64Col(pos=9),
    'mid': tb.UInt64Col(pos=10),
    'return': tb.UInt64Col(pos=11),
    'end': tb.UInt64Col(pos=12),
    'postfix': tb.UInt64Col(pos=13),
    'tlim': tb.UInt64Col(shape=(2, ), pos=14),
    'slice': tb.UInt32Col(shape=(2, ), pos=15),
    'outbound': tb.UInt32Col(shape=(2, ), pos=16),
Example #30
class totalData(tables.IsDescription):
    year         = tables.UInt16Col()
    match_count  = tables.UInt64Col()
    volume_count = tables.UInt64Col()
    page_count   = tables.UInt32Col()
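Reading such a table back gives a NumPy structured array whose fields mirror the unsigned column types; a hedged sketch, assuming the table was stored as /totals (file and node names are illustrative):

import tables

with tables.open_file("ngrams.h5", mode="r") as h5:
    totals = h5.root.totals.read()        # structured array: uint16/uint64/uint64/uint32
    print(totals.dtype)
    print(totals["year"], totals["match_count"])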