Example #1
    def make_channel_map(self):
        """
        Return a ChannelMap and vectors of data array channels corresponding to electrode, grounded inputs,
        and reference electrodes.

        Returns
        -------
        channel_map: ChannelMap
            data-channel to electrode-grid map
        electrode_chans: list
            data channels that are electrodes
        grounded: list
            data channels that are grounded input (possibly empty)
        reference: list
            data channels that are reference electrodes (possibly empty)

        """

        channel_map, grounded, reference = get_electrode_map(
            self.electrode, connectors=self.electrode_connectors)
        with h5py.File(self.data_file, 'r') as h5file:
            n_data_channels = h5file[self.data_array].shape[int(
                self.transpose_array)]
        electrode_chans = [
            n for n in range(n_data_channels) if n not in grounded + reference
        ]
        return channel_map, electrode_chans, grounded, reference
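
A minimal usage sketch for this method. The `loader` instance name and the channels-first array layout are assumptions for illustration, not part of the original source:

import h5py
import numpy as np

# Hypothetical caller: split the data array into electrode vs. grounded rows
# using the four values returned above. `loader` stands in for the file-data
# object whose class defines make_channel_map().
channel_map, electrode_chans, grounded, reference = loader.make_channel_map()
with h5py.File(loader.data_file, 'r') as h5file:
    raw = h5file[loader.data_array][:]   # assumed channels x samples layout
electrode_data = raw[np.asarray(electrode_chans)]
ground_data = raw[np.asarray(grounded)] if len(grounded) else None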
Example #2
def load_open_ephys_impedance(exp_path,
                              test,
                              magphs=True,
                              electrode=None,
                              electrode_connections=()):

    xml = osp.join(osp.join(exp_path, test), 'impedance_measurement.xml')
    if not osp.exists(xml):
        raise IOError('No impedance XML found')

    root = etree.ElementTree(file=xml).getroot()
    chans = list(root)  # getchildren() is deprecated / removed in newer ElementTree
    mag = np.zeros(len(chans), 'd')
    phs = np.zeros(len(chans), 'd')
    for n, ch in enumerate(chans):
        # cannot rely on "channel_number" -- it only counts 0-31
        # n = int(ch.attrib['channel_number'])
        mag[n] = float(ch.attrib['magnitude'])
        phs[n] = float(ch.attrib['phase'])

    if electrode:
        chan_map, gnd_chans, ref_chans = get_electrode_map(
            electrode, connectors=electrode_connections)
        ix = np.arange(len(phs))
        cx = np.setdiff1d(ix, np.union1d(gnd_chans, ref_chans))
        mag = mag[cx]
        phs = phs[cx]
    if magphs:
        r_value = (mag, phs)
    else:
        r_value = mag * np.exp(1j * phs * np.pi / 180.0)
    if electrode:
        return r_value, chan_map
    return r_value
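
A hedged usage sketch of the two return modes. The path, test name, and electrode name are placeholders ('psv_61_afe' is simply one of the electrode names that appears elsewhere on this page):

# Magnitude/phase form, with a channel map (electrode given):
(mag, phs), chan_map = load_open_ephys_impedance(
    '/path/to/experiment', 'impedance_test', electrode='psv_61_afe')

# Complex form, no channel map (electrode omitted, magphs=False):
z = load_open_ephys_impedance('/path/to/experiment', 'impedance_test',
                              magphs=False)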
Example #3
def load_cooked(exp_path, test, electrode, **kwargs):
    m = sio.loadmat(os.path.join(exp_path, test + '.mat'))
    data = m.pop('dataf').copy(order='C')
    m = sio.loadmat(os.path.join(exp_path, test + '_trig.mat'))
    trigs = m.pop('indxfilt').squeeze()
    Fs = 500.0

    #chan_map = ChannelMap( range(1,9), (2,5), col_major=False )
    chan_map = epins.get_electrode_map(electrode)[0]

    bandpass = (2, -1)
    return data, trigs, Fs, chan_map, bandpass
Example #4
    def make_channel_map(self):
        if os.path.isdir(self.primary_data_file):
            data_path, rec_num = prepare_paths(self.experiment_path,
                                               self.recording, 'auto')
            channel_files = OE.get_filelist(data_path,
                                            source=rec_num[0],
                                            ctype='CH')
            n_data_channels = len(channel_files)
        else:
            with h5py.File(self.data_file, 'r') as h5file:
                n_data_channels = h5file[self.data_array].shape[0]
        channel_map, grounded, reference = get_electrode_map(
            self.electrode, self.electrode_connectors)
        electrode_chans = [
            n for n in range(n_data_channels) if n not in grounded + reference
        ]
        return channel_map, electrode_chans, grounded, reference
Example #5
    def launch(self):
        if not os.path.exists(self.file_data.file):
            return

        # Logic to normalize channel mapping
        # TODO: handle reference channels correctly
        if self.chan_map == 'unknown':
            try:
                nc = np.array(list(map(int, self.skip_chan.split(','))))
            except ValueError:
                nc = []
            geo = list(map(int, self.elec_geometry.split(',')))
            n_sig_chan = self.n_chan - len(nc)
            chan_map = ChannelMap(np.arange(n_sig_chan), geo)
        elif self.chan_map == 'active':
            chan_map, nc = self.file_data.make_channel_map()
        elif self.chan_map in _subset_chan_maps:
            cnx = self.chan_map_connectors.split(',')
            cnx = [c.strip() for c in cnx]
            chan_map, nc, rf = get_electrode_map(self.chan_map, connectors=cnx)
            nc = list(set(nc).union(rf))
        elif self.chan_map in _subset_shortcuts:
            cnx = _subset_shortcuts[self.chan_map]
            map_name, shortcut = self.chan_map.split('/')
            chan_map, nc, rf = get_electrode_map(map_name, connectors=cnx)
            nc = list(set(nc).union(rf))
        elif self.chan_map == 'settable':
            chan_map = self.set_chan_map
            nc = []
        elif self.chan_map == 'pickled':
            try:
                chan_map = find_pickled_map(self.file_data.file)
                nc = []
            except NoPickleError:
                MessageDialog(message='No pickled ChannelMap').open()
                return
        else:
            chan_map, nc, rf = get_electrode_map(self.chan_map)
            nc = list(set(nc).union(rf))

        # Check for transposed active data (coming from matlab)
        if isinstance(self.file_data,
                      ActiveArrayFileData) and self.file_data.is_transpose:
            self.file_data.create_transposed()
            print(self.file_data.file)

        with h5py.File(self.file_data.file, 'r') as h5:
            #x_scale = h5[self.file_data.fs_field].value ** -1.0
            x_scale = self.file_data.Fs**-1.0
            array_size = h5[self.file_data.data_field].shape[0]
        num_vectors = len(chan_map) + len(nc)

        data_channels = [
            self.file_data.data_channels[i] for i in range(num_vectors)
            if i not in nc
        ]

        # permute channels to stack rows
        chan_idx = list(zip(*chan_map.to_mat()))
        chan_order = chan_map.lookup(*list(zip(*sorted(chan_idx)[::-1])))
        data_channels = [data_channels[i] for i in chan_order]
        cls = type(chan_map)
        chan_map = cls([chan_map[i] for i in chan_order],
                       chan_map.geometry,
                       pitch=chan_map.pitch,
                       col_major=chan_map.col_major)

        filters = self.filters.make_pipeline(x_scale**-1.0)
        array = self.file_data._compose_arrays(filters)
        if self.screen_channels:
            data_channels, chan_map = \
              self._get_screen(array, data_channels, chan_map, x_scale**-1.0)

        rm = np.zeros((array_size, ), dtype='?')
        rm[data_channels] = True

        nav = h5mean(array.file_array, 0, rowmask=rm)
        nav *= self.file_data.y_scale

        modules = [ana_modules[k] for k in self.module_set]
        new_vis = FastScroller(array,
                               self.file_data.y_scale,
                               self.offset * 1e-6,
                               chan_map,
                               nav,
                               x_scale=x_scale,
                               load_channels=data_channels,
                               max_zoom=self.max_window_width)
        file_name = os.path.split(self.file_data.file)[1]
        file_name = os.path.splitext(file_name)[0]
        v_win = VisWrapper(new_vis,
                           x_scale=x_scale,
                           chan_map=chan_map,
                           y_spacing=self.offset,
                           modules=modules,
                           recording=file_name)
        view = v_win.default_traits_view()
        # TODO: it would be nice to be able to directly call launch() without first showing *this* object's panel
        view.kind = 'live'
        ui = v_win.edit_traits(view=view)
        return v_win
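
The row-stacking permutation above goes through the ChannelMap API; the underlying idea, shown here with plain numpy and a toy (row, col) assignment that is not taken from the original code, is essentially a lexicographic sort of the grid sites:

import numpy as np

# Toy illustration only: each data channel sits at a (row, col) grid site and
# we want the permutation that walks the grid row by row.
rows = np.array([1, 0, 1, 0])
cols = np.array([1, 0, 0, 1])
order = np.lexsort((cols, rows))   # sort by row first, then column
# data_channels would then be re-listed as [data_channels[i] for i in order]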
Example #6
File: mux.py Project: miketrumpis/ecogdata
def load_mux(exp_path,
             test,
             electrode,
             headstage,
             ni_daq_variant='',
             mux_connectors=(),
             bandpass=(),
             notches=(),
             trigger=0,
             bnc=(),
             mux_notches=(),
             save=False,
             snip_transient=True,
             units='uV'):
    """
    Load data from the MUX style headstage acquisition. Data is expected 
    to be organized along columns corresponding to the MUX units. The
    columns following sensor data columns are assumed to be a stimulus
    trigger followed by other BNC channels.

    The electrode information must be provided to determine the
    arrangement of recorded and grounded channels within the sensor
    data column.
    
    This preprocessing routine returns a Bunch container with the
    following items
    
    dset.data : nchan x ntime data array
    dset.ground_chans : m x ntime data array of grounded ADC channels
    dset.bnc : un-MUXed readout of the BNC channel(s)
    dset.chan_map : the channel-to-electrode mapping vector
    dset.Fs : sampling frequency
    dset.name : path + expID for the given data set
    dset.bandpass : bandpass filtering applied (if any)
    dset.trig : the logical value of the trigger channel (at MUX'd Fs)

    * If saving, then a table of the Bunch is written.
    * If snip_transient, then advance the timeseries past the bandpass
      filtering onset transient.
    
    """

    try:
        dset = try_saved(exp_path, test, bandpass)
        return dset
    except DataPathError:
        pass

    # say no to shared memory since it's created later on in this method
    loaded = rawload_mux(exp_path,
                         test,
                         headstage,
                         daq_variant=ni_daq_variant,
                         shm=False)
    channels, Fs, dshape, info = loaded
    nrow, ncol_data = dshape
    if channels.shape[0] >= nrow * ncol_data:
        ncol = channels.shape[0] // nrow
        channels = channels.reshape(ncol, nrow, -1)
    else:
        ncol = channels.shape[0]
        channels.shape = (ncol, -1, nrow)
        channels = channels.transpose(0, 2, 1)

    ## Grab BNC data

    if bnc:
        bnc_chans = [ncol_data + int(b) for b in bnc]
        bnc = np.zeros((len(bnc), nrow * channels.shape[-1]))
        for bc, col in zip(bnc, bnc_chans):
            bc[:] = channels[col].transpose().ravel()
        bnc = bnc.squeeze()

    try:
        trig_chans = channels[ncol_data + trigger].copy()
        pos_edge, trig = process_trigger(trig_chans)
    except IndexError:
        pos_edge = ()
        trig = ()

    ## Realize channel mapping

    chan_map, disconnected, reference = epins.get_electrode_map(
        electrode, connectors=mux_connectors)

    ## Data channels

    # if any pre-processing of multiplexed channels, do it here first
    if mux_notches:

        mux_chans = shm.shared_ndarray((ncol_data, channels.shape[-1], nrow))
        mux_chans[:] = channels[:ncol_data].transpose(0, 2, 1)
        mux_chans.shape = (ncol_data, -1)
        ft.notch_all(mux_chans, Fs, lines=mux_notches, filtfilt=True)
        mux_chans.shape = (ncol_data, channels.shape[-1], nrow)
        channels[:ncol_data] = mux_chans.transpose(0, 2, 1)
        del mux_chans

    rec_chans = channels[:ncol_data].reshape(nrow * ncol_data, -1)

    if units.lower() != 'v':
        convert_scale(rec_chans, 'v', units)

    g_chans = disconnected
    r_chans = reference
    d_chans = np.setdiff1d(np.arange(ncol_data * nrow),
                           np.union1d(g_chans, r_chans))

    data_chans = shm.shared_copy(rec_chans[d_chans])
    if len(g_chans):
        gnd_data = rec_chans[g_chans]
    else:
        gnd_data = ()
    if len(r_chans):
        ref_data = rec_chans[r_chans]
    else:
        ref_data = ()
    del rec_chans
    del channels

    # do highpass filtering for stationarity
    if bandpass:
        # manually remove DC from channels before filtering
        if bandpass[0] > 0:
            data_chans -= data_chans.mean(1)[:, None]
            # do a high order highpass to really crush the crappy
            # low frequency noise
            b, a = ft.butter_bp(lo=bandpass[0], Fs=Fs, ord=5)
            # b, a = ft.cheby1_bp(0.5, lo=bandpass[0], Fs=Fs, ord=5)
        else:
            b = [1]
            a = [1]
        if bandpass[1] > 0:
            b_lp, a_lp = ft.butter_bp(hi=bandpass[1], Fs=Fs, ord=3)
            b = np.convolve(b, b_lp)
            a = np.convolve(a, a_lp)

        filtfilt(data_chans, b, a)
        if len(ref_data):
            with parallel_controller(False):
                ref_data = np.atleast_2d(ref_data)
                filtfilt(ref_data, b, a)
                ref_data = ref_data.squeeze()

    if notches:
        ft.notch_all(data_chans,
                     Fs,
                     lines=notches,
                     inplace=True,
                     filtfilt=True)
        if len(ref_data):
            with parallel_controller(False):
                ref_data = np.atleast_2d(ref_data)
                ft.notch_all(ref_data,
                             Fs,
                             lines=notches,
                             inplace=True,
                             filtfilt=True)
                ref_data = ref_data.squeeze()

    if snip_transient:
        if isinstance(snip_transient, bool):
            snip_len = int(Fs * 5)
        else:
            snip_len = int(Fs * snip_transient)
        if len(pos_edge):
            snip_len = max(0, min(snip_len, pos_edge[0] - int(Fs)))
            pos_edge = pos_edge - snip_len
            trig = trig[..., snip_len:].copy()
        data_chans = data_chans[..., snip_len:].copy()
        gnd_data = gnd_data[..., snip_len:].copy()
        if len(ref_data):
            ref_data = ref_data[..., snip_len:].copy()
        if len(bnc):
            bnc = bnc[..., snip_len * nrow:].copy()

    # do blockwise detrending for stationarity
    ## detrend_window = int(round(0.750*Fs))
    ## ft.bdetrend(data_chans, bsize=detrend_window, type='linear', axis=-1)
    dset = ut.Bunch()
    dset.pos_edge = pos_edge
    dset.data = data_chans
    dset.ground_chans = gnd_data
    dset.ref_chans = ref_data
    dset.bnc = bnc
    dset.chan_map = chan_map
    dset.Fs = Fs
    #dset.name = os.path.join(exp_path, test)
    dset.bandpass = bandpass
    dset.trig = trig
    dset.transient_snipped = snip_transient
    dset.units = units
    dset.notches = notches
    dset.info = info

    if save:
        hf = os.path.join(exp_path, test + '_proc.h5')
        save_bunch(hf, '/', dset, mode='w')
    return dset
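
The de-MUX reshape at the top of this loader (the `channels.reshape(ncol, nrow, -1)` branch) can be pictured with a small synthetic block; the shapes below are invented for illustration and are not real acquisition sizes:

import numpy as np

# Synthetic shapes: ncol ADC columns, each carrying nrow multiplexed channels.
nrow, ncol, T = 8, 4, 1000
flat = np.zeros((ncol * nrow, T))                       # stands in for the raw load
channels = flat.reshape(ncol, nrow, -1)                 # (column, row, time)
rec_chans = channels[:ncol].reshape(nrow * ncol, -1)    # stacked electrode rows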
Example #7
def plot_Z(path_or_Z,
           electrode,
           minZ,
           maxZ,
           cmap,
           phs=False,
           title='',
           ax=None,
           cbar=True,
           clim=None,
           electrode_connections=()):
    # from ecogana.anacode.colormaps import nancmap

    if isinstance(path_or_Z, str):
        path = osp.abspath(path_or_Z)
        path, test = osp.split(path)
        if not len(title):
            title = test
        Z, chan_map = load_open_ephys_impedance(
            path, test, electrode, electrode_connections=electrode_connections)
        Z = Z[1] if phs else Z[0]
    else:
        Z = path_or_Z.copy()
        chan_map = get_electrode_map(electrode)[0]

    if not phs:
        Z *= 1e-3
        Z_open = Z > maxZ
        Z_shrt = Z < minZ
        np.log10(Z, Z)
        Z[Z_open] = 1e20
        Z[Z_shrt] = -1
        lo = Z[~(Z_open | Z_shrt)].min()
        hi = Z[~(Z_open | Z_shrt)].max()
    else:
        lo, hi = Z.min(), Z.max()
    if np.abs(lo - round(lo)) < np.abs(hi - round(hi)):
        lo = round(lo)
        hi = np.ceil(hi)
    ## else:
    ##     lo = np.floor(lo)
    ##     hi = round(hi)

    if clim is None:
        clim = (lo, hi)
    else:
        lo, hi = clim

    # cm = nancmap(cmap, overc='gray', underc='lightgray')
    r_ = chan_map.image(Z, cmap=cmap, clim=clim, ax=ax, cbar=cbar)
    if cbar:
        f, cb = r_
    else:
        f = r_
    if ax is None:
        ax = f.axes[-(1 + int(cbar))]
    if not phs:
        bad = Z_open.sum() + Z_shrt.sum()
        yld = 100 * (1 - float(bad) / len(Z_open))
        title = title + ' ({0:.2f}% yield)'.format(yld)
    ax.set_title(title)
    if phs:
        if cbar:
            cb.set_label('Phase (degrees)')
        f.tight_layout()
        return f
    if cbar:
        c_ticks = np.array([
            0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000
        ])
        c_ticks = c_ticks[(c_ticks >= 10**lo) & (c_ticks <= 10**hi)]
        cb.set_ticks(np.log10(c_ticks))
        cb.set_ticklabels(list(map(str, c_ticks)))
        cb.set_label(u'Impedance (k\u03A9)')
    f.tight_layout()
    return f
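
The open/short masking and the yield figure in plot_Z can be checked on a synthetic impedance vector (the magnitudes and thresholds below are invented):

import numpy as np

Z_kohm = np.array([15.0, 22.0, 9000.0, 0.01, 30.0])   # invented magnitudes, kOhm
minZ, maxZ = 0.5, 1000.0
Z_open, Z_shrt = Z_kohm > maxZ, Z_kohm < minZ
yld = 100 * (1 - float(Z_open.sum() + Z_shrt.sum()) / len(Z_kohm))   # 60.0 here
logZ = np.where(Z_open | Z_shrt, np.nan, np.log10(Z_kohm))           # plotted on a log scale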
Example #8
def load_blackrock(
        exp_path, test, electrode, connections=(), 
        downsamp=15, page_size=10, bandpass=(), notches=(), 
        save=True, snip_transient=True, lowpass_ord=12, units='uV',
        **extra
        ):
    """
    Load raw data in an HDF5 table stripped from Blackrock NSx format.
    This data should be 16 bit signed integer sampled at 30 kHz. We
    take the approach of resampling to a lower rate (default 2 kHz)
    before subsequent bandpass filtering. This improves the numerical
    stability of the bandpass filter design.

    """

    dsamp_path = p.join(exp_path, 'downsamp')
    nsx_path = p.join(exp_path, 'blackrock')

    ## Get array-to-channel pinouts
    chan_map, disconnected = epins.get_electrode_map(electrode, connectors=connections)[:2]

    # try preproc path first to see if this run has already been downsampled
    load_nsx = True
    if downsamp > 1:
        dsamp_Fs = 3e4 / downsamp
        try:
            test_file = p.join(dsamp_path, test) + '_Fs%d.h5'%dsamp_Fs
            print('searching for', test_file)
            h5f = tables.open_file(test_file)
            downsamp = 1
            load_nsx = False
        except IOError:
            print('Resampled data not found: downsampling to %d Hz'%dsamp_Fs)

    if load_nsx:
        test_file = p.join(nsx_path, test+'.h5')
        h5f = tables.open_file(test_file)

    if downsamp > 1:
        (b, a) = cheby2_bp(60, hi=1.0/downsamp, Fs=2, ord=lowpass_ord)

        if not p.exists(dsamp_path):
            os.mkdir(dsamp_path)
        save_file = p.join(dsamp_path, test) + '_Fs%d.h5'%dsamp_Fs
        h5_save = tables.open_file(save_file, mode='w')
        h5_save.create_array(h5_save.root, 'Fs', dsamp_Fs)
    else:
        # in this case, either the preprocessed data has been found,
        # or downsampling was not requested, which will probably
        # *NEVER* happen
        if load_nsx:
            dlen, nchan = h5f.root.data.shape
            required_mem = dlen * nchan * np.dtype('d').itemsize
            if required_mem > 8e9:
                raise MemoryError(
                    'This dataset would eat %.2f GBytes RAM'%(required_mem/1e9,)
                    )

    dlen, nchan = h5f.root.data.shape
    if dlen < nchan:
        (dlen, nchan) = (nchan, dlen)
        tdim = 1
    else:
        tdim = 0
    
    sublen = dlen // downsamp
    if dlen - sublen*downsamp > 0:
        sublen += 1

    # set up arrays for loaded data and ground chans
    subdata = array_split.shared_ndarray((len(chan_map), sublen))
    if len(chan_map) < nchan:
        gndchan = np.empty((len(disconnected), sublen), 'd')
    else:
        gndchan = None

    # if saving downsampled results, set up H5 table (in c-major fashion)
    if downsamp > 1:
        atom = tables.Float64Atom()
        #filters = tables.Filters(complevel=5, complib='zlib')
        filters = None
        saved_array = h5_save.create_earray(
            h5_save.root, 'data', atom=atom, shape=(0, sublen),
            filters=filters, expectedrows=nchan
            )
    if page_size < 0:
        page_size = nchan
    peel = array_split.shared_ndarray( (page_size, dlen) )
    n = 0
    dstop = 0
    h5_data = h5f.root.data
    while n < nchan:
        start = n
        stop = min(nchan, n+page_size)
        print('processing BR channels %03d - %03d'%(start, stop-1))
        if tdim == 0:
            peel[0:stop-n] = h5_data[:,start:stop].T.astype('d', order='C')
        else:
            peel[0:stop-n] = h5_data[start:stop,:].astype('d')
        if downsamp > 1:
            convert_dyn_range(peel, (-2**15, 2**15), (-8e-3, 8e-3), out=peel)
            print('parfilt', end=' ')
            sys.stdout.flush()
            filtfilt(peel[0:stop-n], b, a)
            print('done')
            sys.stdout.flush()
            print('saving chans', end=' ') 
            sys.stdout.flush()
            saved_array.append(peel[0:stop-n,::downsamp])
            print('done')
            sys.stdout.flush()

        if units.lower() != 'v':
            convert_scale(peel, 'v', units)
        data_chans = np.setdiff1d(np.arange(start,stop), disconnected)
        if len(data_chans):
            dstart = dstop
            dstop = dstart + len(data_chans)
        if len(data_chans) == (stop-start):
            # if all data channels, peel off in a straightforward way
            #print (dstart, dstop)
            subdata[dstart:dstop,:] = peel[0:stop-n,::downsamp]
        else:
            if len(data_chans):
                # get data channels first
                raw_data = peel[data_chans-n, :]
                #print (dstart, dstop), data_chans-n
                subdata[dstart:dstop, :] = raw_data[:, ::downsamp]
            # Now filter for ground channels within this set of channels:
            gnd_chans = [x for x in zip(disconnected,
                                        range(len(disconnected)))
                            if x[0]>=start and x[0]<stop]
            for g in gnd_chans:
                gndchan[g[1], :] = peel[g[0]-n, ::downsamp]
        n += page_size

    del peel
    try:
        Fs = h5f.root.Fs.read()[0,0] / downsamp
    except TypeError:
        Fs = h5f.root.Fs.read() / downsamp
    trigs = h5f.root.trig_idx.read().squeeze()
    if not trigs.shape:
        trigs = ()
    else:
        trigs = np.round( trigs / downsamp ).astype('i')
    h5f.close()
    if downsamp > 1:
        h5_save.create_array(h5_save.root, 'trig_idx', np.asarray(trigs))
        h5_save.close()

    # seems to be more numerically stable to do highpass and 
    # notch filtering after downsampling
    if bandpass:
        lo, hi = bandpass
        (b, a) = butter_bp(lo=lo, hi=hi, Fs=Fs, ord=4)
        filtfilt(subdata, b, a)
        
    if notches:
        notch_all(subdata, Fs, lines=notches, inplace=True, filtfilt=True)

        
    dset = ut.Bunch()
    dset.data = subdata
    dset.ground_chans = gndchan
    dset.chan_map = chan_map
    dset.Fs = Fs
    #dset.name = os.path.join(exp_path, test)
    dset.bandpass = bandpass
    dset.notches = notches
    dset.trig = trigs
    if len(trigs) == subdata.shape[-1]:
        dset.pos_edge = np.where( np.diff(trigs) > 0 )[0] + 1
    else:
        dset.pos_edge = trigs
    dset.units = units
    gc.collect()
    return dset
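
The low-pass-then-decimate step here goes through the project's cheby2_bp/filtfilt helpers; a rough standalone equivalent with scipy.signal, with filter order and attenuation chosen only for illustration, might look like:

import numpy as np
from scipy import signal

def downsample(x, factor, fs):
    """Anti-alias with a Chebyshev II low-pass, then keep every factor-th sample."""
    # 60 dB stopband, corner at 80% of the new Nyquist rate (illustrative choices)
    b, a = signal.cheby2(8, 60, 0.8 / factor)
    y = signal.filtfilt(b, a, x, axis=-1)
    return y[..., ::factor], fs / factor

# e.g. data_2k, fs_2k = downsample(raw_30k, 15, 30000.0)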
Example #9
def load_afe(exp_pth,
             test,
             electrode,
             n_data,
             range_code,
             cycle_rate,
             units='nA',
             bandpass=(),
             save=True,
             notches=(),
             snip_transient=True,
             **extra):

    h5 = tables.open_file(os.path.join(exp_pth, test + '.h5'))

    data = h5.root.data[:]
    Fs = h5.root.Fs[0, 0]

    if data.shape[1] > n_data:
        trig_chans = data[:, n_data:]
        trig = np.any(trig_chans > 1, axis=1).astype('i')
        pos_edge = np.where(np.diff(trig) > 0)[0] + 1
    else:
        trig = None
        pos_edge = ()

    data_chans = data[:, :n_data].T.copy(order='C')

    # convert dynamic range to charge or current
    if 'v' not in units.lower():
        pico_coulombs = range_lookup[range_code]
        convert_dyn_range(data_chans, (-1.4, 1.4),
                          pico_coulombs,
                          out=data_chans)
        if 'a' in units.lower():
            # To convert to amps, need to divide coulombs by the
            # integration period. This is found approximately by
            # finding out how many cycles in a scan period were spent
            # integrating. A scan period is now hard coded to be 500
            # cycles. The cycling rate is given as an argument.
            # The integration period for channel i should be:
            # 500 - 2*(n_data - i)
            # That is, the 1st channel is clocked out over two cycles
            # immediately after the integration period. Meanwhile other
            # channels still acquire until they are clocked out.
            n_cycles = 500
            #i_cycles = n_cycles - 2*(n_data - np.arange(n_data))
            i_cycles = n_cycles - 2 * n_data
            i_period = i_cycles / cycle_rate
            data_chans /= i_period  #[:,None]
            convert_scale(data_chans, 'pa', units)
    elif units.lower() != 'v':
        convert_scale(data_chans, 'v', units)

    # only use this one electrode (for now)
    chan_map, disconnected = epins.get_electrode_map('psv_61_afe')[:2]
    connected = np.setdiff1d(np.arange(n_data), disconnected)
    disconnected = disconnected[disconnected < n_data]

    chan_map = chan_map.subset(list(range(len(connected))))

    data = shm.shared_ndarray((len(connected), data_chans.shape[-1]))
    data[:, :] = data_chans[connected]
    ground_chans = data_chans[disconnected].copy()
    del data_chans

    if bandpass:
        # do a little extra to kill DC
        data -= data.mean(axis=1)[:, None]
        (b, a) = ft.butter_bp(lo=bandpass[0], hi=bandpass[1], Fs=Fs)
        filtfilt(data, b, a)
    if notches:
        for freq in notches:
            (b, a) = ft.notch(freq, Fs=Fs, ftype='cheby2')
            filtfilt(data, b, a)
    else:
        # this else pairs with "if notches": no notch filtering, so at least remove DC
        data -= data.mean(axis=1)[:, None]

    ## detrend_window = int(round(0.750*Fs))
    ## ft.bdetrend(data, bsize=detrend_window, type='linear', axis=-1)

    if snip_transient:
        snip_len = min(10000, pos_edge[0]) if len(pos_edge) else 10000
        data = data[..., snip_len:].copy()
        ground_chans = ground_chans[..., snip_len:].copy()
        if len(pos_edge):
            trig = trig[..., snip_len:]
            pos_edge -= snip_len

    dset = Bunch()

    dset.data = data
    dset.ground_chans = ground_chans
    dset.chan_map = chan_map
    dset.Fs = Fs
    dset.pos_edge = pos_edge
    dset.bandpass = bandpass
    dset.trig = trig
    dset.transient_snipped = snip_transient
    dset.units = units
    dset.notches = notches
    return dset
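
A quick numeric check of the integration-period comment inside this loader; the n_data and cycle_rate values are invented for the example:

import numpy as np

# Per the in-code comment: a 500-cycle scan, channel i integrates for
# 500 - 2*(n_data - i) cycles; the loader divides by the constant 500 - 2*n_data.
n_data, cycle_rate = 32, 1.0e6                      # invented example values
i_cycles = 500 - 2 * (n_data - np.arange(n_data))   # per-channel cycle counts
i_cycles_const = 500 - 2 * n_data                   # what the code actually uses
print(i_cycles[:3], i_cycles_const)                 # [436 438 440] 436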
Example #10
def load_afe_aug21(exp_pth,
                   test,
                   electrode,
                   n_data,
                   range_code,
                   cycle_rate,
                   units='nA',
                   bandpass=(),
                   save=False,
                   notches=(),
                   snip_transient=True,
                   **extra):
    h5 = tables.open_file(os.path.join(exp_pth, test + '.h5'))
    Fs = h5.root.Fs.read()

    n_row = h5.root.numRow.read()
    n_data_col = h5.root.numCol.read()
    n_col = h5.root.numChan.read()
    # data rows are 4:67 -- acquiring AFE chans 31:0 and 63:32 on two columns
    data_rows = slice(4, 67, 2)

    full_data = h5.root.data[:].reshape(n_col, n_row, -1)

    #data_chans = shm.shared_ndarray( (32*n_data_col, full_data.shape[-1]) )
    data_chans = full_data[:n_data_col,
                           data_rows].reshape(-1, full_data.shape[-1])
    trig_chans = full_data[-10:, -1]
    del full_data

    trig = np.any(trig_chans > 1, axis=0).astype('i')
    pos_edge = np.where(np.diff(trig) > 0)[0] + 1

    # convert dynamic range to charge or current
    if 'v' not in units.lower():
        pico_coulombs = range_lookup[range_code]
        convert_dyn_range(data_chans, (-1.4, 1.4),
                          pico_coulombs,
                          out=data_chans)
        if 'a' in units.lower():
            i_period = 563e-6
            data_chans /= i_period
            convert_scale(data_chans, 'pa', units)
    elif units.lower() != 'v':
        convert_scale(data_chans, 'v', units)

    # only use this one electrode (for now)
    chan_map, disconnected = epins.get_electrode_map('psv_61_afe')[:2]
    connected = np.setdiff1d(np.arange(n_data), disconnected)
    disconnected = disconnected[disconnected < n_data]

    data = shm.shared_ndarray((len(connected), data_chans.shape[-1]))
    data[:, :] = data_chans[connected]
    ground_chans = data_chans[disconnected]

    del data_chans
    # do a little extra to kill DC
    data -= data.mean(axis=1)[:, None]
    if bandpass:
        (b, a) = ft.butter_bp(lo=bandpass[0], hi=bandpass[1], Fs=Fs)
        filtfilt(data, b, a)
    if notches:
        ft.notch_all(data, Fs, lines=notches, inplace=True, filtfilt=True)

    if snip_transient:
        snip_len = min(10000, pos_edge[0]) if len(pos_edge) else 10000
        data = data[..., snip_len:].copy()
        if ground_chans is not None:
            ground_chans = ground_chans[..., snip_len:].copy()
        if len(pos_edge):
            trig = trig[..., snip_len:]
            pos_edge -= snip_len

    dset = ut.Bunch()

    dset.data = data
    dset.ground_chans = ground_chans
    dset.chan_map = chan_map
    dset.Fs = Fs
    dset.pos_edge = pos_edge
    dset.bandpass = bandpass
    dset.trig = trig
    dset.transient_snipped = snip_transient
    dset.units = units
    dset.notches = notches
    return dset