def partition_responses(rmap_bins=36,
                        table='cell_information',
                        quality='None',
                        smooth=True,
                        just_bins=False):
    import numpy as np

    from scanr.cluster import PrincipalCellCriteria, get_min_quality_criterion, AND
    from scanr.data import get_node
    # SessionData is assumed to be imported at module level elsewhere

    pbins = np.array(
        [1.1, 0.5, 0.1, 0.05, 0.03, 0.02, 0.01, 0.005, 0.002, 0.0])
    ibins = np.linspace(0, 6, 13)
    if just_bins:
        return pbins, ibins
    N_pbins, N_ibins = len(pbins) - 1, len(ibins) - 1

    R = [[[] for j in xrange(N_ibins)] for i in xrange(N_pbins)]
    cell_table = get_node('/physiology', table)

    cell_criteria = AND(PrincipalCellCriteria,
                        get_min_quality_criterion(quality))

    for cell in cell_table.where('(area=="CA3")|(area=="CA1")'):

        session = SessionData.get((cell['rat'], cell['day'], cell['session']),
                                  load_clusters=False)
        cluster = session.cluster_data(cell['tc'])

        if not (cell['N_running'] > 30 and cell_criteria.filter(cluster)):
            continue

        pix = (cell['p_value'] <= pbins).nonzero()[0]
        iix = (cell['I'] >= ibins).nonzero()[0]

        if not len(pix) or not (0 <= pix[-1] < N_pbins):
            continue
        if not len(iix) or not (0 <= iix[-1] < N_ibins):
            continue

        pix = pix[-1]
        iix = iix[-1]

        R[pix][iix].append(
            session.get_cluster_ratemap(cluster,
                                        bins=rmap_bins,
                                        smoothing=smooth,
                                        blur_width=360. / rmap_bins,
                                        exclude_off_track=True,
                                        exclude=session.scan_and_pause_list))

        print '...added %s to p-value bin %.4f, info bin %.2f...' % (
            cell['tc'], pbins[pix], ibins[iix])

    # Convert each bin's list of ratemaps into an array, with rows sorted
    # by the position of each ratemap's peak firing rate
    for i, row in enumerate(R):
        for j, rmap in enumerate(row):
            R[i][j] = r = np.asarray(rmap)
            if not len(rmap):
                continue
            R[i][j] = r[np.argsort(np.argmax(r, axis=1))]

    return R
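# Usage sketch (assumes the scanr package and its HDF5 data tree are
# available; the quality level name 'fair' follows the later examples):
if __name__ == '__main__':
    pbins, ibins = partition_responses(just_bins=True)
    R = partition_responses(rmap_bins=36, quality='fair')
    for i in xrange(len(pbins) - 1):
        for j in xrange(len(ibins) - 1):
            print '%d ratemaps in p-value bin %.4f, info bin %.2f' % (
                len(R[i][j]), pbins[i], ibins[j])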
Example #2
    def plot_LFP_example(self, rds=(95,4,1), scan_number=22, margin=2.0):
        """Plot example LFP traces showing theta power during a head scan
        """
        data = SessionData.get(rds)
        scan_table = get_node('/behavior', 'scans')
        scan = get_unique_row(scan_table,
            data.session_query + '&(number==%d)'%scan_number)

        t_scan = { k: data.T_(scan[k]) for k in ScanPoints }
        window = t_scan['downshift'] - margin, t_scan['upshift'] + margin

        plt.ioff()
        f = self.new_figure('scan_LFP_example',
            'Scan LFP Example: Rat %d, Day %d, M%d @ t=%.2f'%(rds + (window[0],)))

        theta_tt = find_theta_tetrode(rds[:2], condn='(EEG==True)&(area=="CA1")')[0]
        ts, EEG = get_eeg_timeseries(rds, theta_tt)
        t_EEG = data.T_(ts)

        data_file = self.get_data_file()
        session = get_unique_row(data_file.root.sessions, data.session_query)
        t_theta, x_theta = Theta.timeseries(t_EEG, EEG)
        ZP_theta = Z(Theta.power(x_theta, filtered=True)) #data_file.getNode('/arrays', session['ZP_theta'])
        f_theta = data_file.getNode('/arrays', session['f_theta'])

        traj = data.trajectory
        t_traj = data.T_(traj.ts)
        s_traj = time_slice(t_traj, start=window[0], end=window[1])
        s_EEG = time_slice(t_EEG, start=window[0], end=window[1])
        s_theta = time_slice(t_theta, start=window[0], end=window[1])

        y = 0
        dy = -2
        norm = lambda x: (x - np.mean(x)) / (1.1 * np.max(np.abs(x - np.mean(x))))
        ax = f.add_subplot(111)
        ax.axhline(y, c='k', ls='-', zorder=0)
        ax.plot(t_traj[s_traj], y + norm(traj.radius[s_traj]), 'b-', lw=2, zorder=1); y += dy
        ax.plot(t_EEG[s_EEG], y + norm(EEG[s_EEG]), 'k-', lw=1, zorder=1); y += dy
        ax.plot(t_theta[s_theta], y + norm(x_theta[s_theta]), 'k-', lw=1, zorder=1); y += dy
        ax.plot(t_theta[s_theta], y + norm(ZP_theta[s_theta]), 'k-', lw=1, zorder=1); y += dy
        ax.plot(t_theta[s_theta], y + norm(f_theta[s_theta]), 'k-', lw=1, zorder=1); y += dy

        ax.axvspan(t_scan['downshift'], t_scan['start'], lw=0, fc='g', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['start'], t_scan['max'], lw=0, fc='m', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['max'], t_scan['return'], lw=0, fc='y', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['return'], t_scan['end'], lw=0, fc='m', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['end'], t_scan['upshift'], lw=0, fc='g', alpha=0.3, zorder=-2)

        ax.set_yticks([0, -2, -4, -6, -8])
        ax.set_yticklabels(['r', 'EEG', 'theta', 'ZP_theta', 'f_theta'])
        ax.set_ylim(-8.75, 1.25)
        ax.set_xlim(*window)
        ax.tick_params(top=False, right=False)

        plt.ion()
        plt.show()
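# The stacked-trace layout above (mean-center each signal, scale to just
# under unit amplitude, offset successive traces by dy) is a general
# pattern; a self-contained sketch using only numpy and matplotlib:
import numpy as np
import matplotlib.pyplot as plt

def plot_stacked_traces(t, signals, labels, dy=-2.0):
    norm = lambda x: (x - np.mean(x)) / (1.1 * np.max(np.abs(x - np.mean(x))))
    fig, ax = plt.subplots()
    for i, x in enumerate(signals):
        y = dy * i
        ax.axhline(y, c='k', ls='-', zorder=0)  # baseline for each trace
        ax.plot(t, y + norm(x), 'k-', lw=1, zorder=1)
    ax.set_yticks(dy * np.arange(len(labels)))
    ax.set_yticklabels(labels)
    return fig, ax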
Example #3
    def plot_scan_example(self, rds=(95,4,1), scan_number=22, margin=1.0):
        """Plot example scan with relevant behavior variables and scan phases
        """
        data = SessionData.get(rds)
        scan_table = get_node('/behavior', 'scans')
        scan = get_unique_row(scan_table,
            data.session_query + '&(number==%d)'%scan_number)

        t_scan = { k: data.T_(scan[k]) for k in ScanPoints }
        window = t_scan['downshift'] - margin, t_scan['upshift'] + margin

        plt.ioff()
        f = self.new_figure('scan_example',
            'Scan Example: Rat %d, Day %d, M%d @ t=%.2f'%(rds + (window[0],)))

        traj = data.trajectory
        t = data.T_(traj.ts)
        s = time_slice(t, start=window[0], end=window[1])
        scan_slice = time_slice(t, start=t_scan['start'], end=t_scan['end'])

        y = 0
        dy = -2
        norm = lambda x: x / np.max(np.abs(x))
        ax = f.add_subplot(111)
        ax.axhline(y, c='k', ls='-', zorder=0)
        ax.plot(t[s], y + norm(traj.radius[s]), 'b-', lw=2, zorder=1); y += dy
        ax.axhline(y, c='k', ls='-', zorder=0)
        ax.plot(t[s], y + norm(traj.radial_velocity[s]), 'b-', lw=2, zorder=1); y += dy
        ax.axhline(y, c='k', ls='-', zorder=0)
        ax.plot(t[s], y + norm(traj.forward_velocity[s]), 'b-', lw=2, zorder=1); y += dy

        ax.axvspan(t_scan['downshift'], t_scan['start'], lw=0, fc='g', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['start'], t_scan['max'], lw=0, fc='m', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['max'], t_scan['return'], lw=0, fc='y', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['return'], t_scan['end'], lw=0, fc='m', alpha=0.3, zorder=-2)
        ax.axvspan(t_scan['end'], t_scan['upshift'], lw=0, fc='g', alpha=0.3, zorder=-2)

        for k in ScanPoints:
            ax.axvline(t_scan[k], c='k', ls='-', zorder=-1)

        ax.set_yticks([0, -2, -4])
        ax.set_yticklabels(['r', 'rs', 'fwd'])
        ax.set_ylim(-4.75, 1.25)
        ax.set_xlim(*window)
        ax.tick_params(top=False, right=False)

        f = self.new_figure('scan_example_space', 'Scan Example', (4,4))
        ax = f.add_subplot(111)
        ax.plot(traj.x[s], traj.y[s], 'k-', alpha=0.6)
        ax.plot(traj.x[scan_slice], traj.y[scan_slice], 'r-', lw=2)
        ax.axis('equal')
        ax.set_axis_off()
        plot_track_underlay(ax=ax, ls='dotted')

        plt.ion()
        plt.show()
Example #4
def show_sample_rates():
    fs_list = []
    tetrodes = get_node('/metadata', 'tetrodes')

    for rec in tetrodes.where('EEG==True'):
        maze = 1  # only the first maze session is checked per tetrode
        rds = (rec['rat'], rec['day'], maze)
        fs = get_eeg_sample_rate(rds, rec['tt'])

        if fs not in fs_list:
            fs_list.append(fs)

        if fs is not None and not (990 < fs < 1010):
            print 'Rat %d, Day %d, M%d, Sc%02d = %.1f Hz' % (rds +
                                                             (rec['tt'], fs))

    return fs_list
Example #5
    def collect_data(self):
        """Collate ripple and head-scan events across CA3/CA1 datasets
        """
        scan_table = get_node('/behavior', 'scans')
        ripple_table = get_node('/physiology', 'ripples')

        # Get datasets, sessions, and rats with detected ripples
        self.results['datasets'] = dataset_list = unique_datasets(ripple_table)
        self.results['N_datasets'] = len(dataset_list)
        session_list = []
        for dataset in dataset_list:
            session_list.extend([dataset + (maze,)
                for maze in get_maze_list(*dataset)])
        self.results['sessions'] = session_list
        self.results['N_sessions'] = len(session_list)
        self.results['rats'] = rat_list = unique_rats(ripple_table)
        self.results['N_rats'] = len(rat_list)

        # Open a new data file
        data_file = self.open_data_file()
        array_group = data_file.createGroup('/', 'arrays', title='Array Data')
        session_table = data_file.createTable('/', 'sessions', SessionDescr,
            'Sessions for Scan-Ripple Analysis')

        # Loop through sessions, detecting ripples and getting head scans
        id_fmt = 'data_%06d'
        array_id = 0
        session_id = 0
        row = session_table.row
        for rds in session_list:
            rds_str = 'rat%d-%02d-m%d...'%rds
            self.out('Loading data for %s'%rds_str)
            data = SessionData.get(rds)
            theta_tt = find_theta_tetrode(rds[:2], condn='(EEG==True)&(area=="CA1")')
            if theta_tt is None:
                continue
            theta_tt = theta_tt[0]
            self.out('Using theta tetrode Sc%02d.'%theta_tt)

            row['id'] = session_id
            row['rat'], row['day'], row['session'] = rds
            if data.attrs['type'] in ('STD', 'MIS'):
                row['type'] = 'DR'
            else:
                row['type'] = 'NOV'

            # Compute smoothed theta power and frequency time-series
            ts, EEG = get_eeg_timeseries(rds, theta_tt)
            t = data.T_(ts) # time represented as elapsed time within session
            t_theta, x_theta = Theta.timeseries(t, EEG)
            ZP_theta = Z(Theta.power(x_theta, filtered=True))
            M_boxcar = int(Theta.fs * THETA_FREQ_SMOOTHING)
            if M_boxcar % 2 == 0:
                M_boxcar += 1  # force an odd boxcar window length
            f_theta = quick_boxcar(Theta.frequency(x_theta, filtered=True),
                M=M_boxcar)

            # Get scans, pauses, and ripples
            scans = data.T_([tuple(map(lambda k: rec[k], ScanPoints))
                for rec in scan_table.where(data.session_query)])
            pauses = data.T_(data.pause_list)
            ripples = data.T_([(rec['start'], rec['peak'], rec['end'])
                for rec in ripple_table.where(data.session_query)])

            # Save the array data as resources for analysis
            data_file.createArray(array_group, id_fmt%array_id, t_theta,
                title='%s t_theta'%rds_str)
            row['t_theta'] = id_fmt%array_id
            array_id += 1

            data_file.createArray(array_group, id_fmt%array_id, ZP_theta,
                title='%s ZP_theta'%rds_str)
            row['ZP_theta'] = id_fmt%array_id
            array_id += 1

            data_file.createArray(array_group, id_fmt%array_id, f_theta,
                title='%s f_theta'%rds_str)
            row['f_theta'] = id_fmt%array_id
            array_id += 1

            data_file.createArray(array_group, id_fmt%array_id, scans,
                title='%s scans'%rds_str)
            row['scans'] = id_fmt%array_id
            array_id += 1

            data_file.createArray(array_group, id_fmt%array_id, pauses,
                title='%s pauses'%rds_str)
            row['pauses'] = id_fmt%array_id
            array_id += 1

            data_file.createArray(array_group, id_fmt%array_id, ripples,
                title='%s ripples'%rds_str)
            row['ripples'] = id_fmt%array_id
            array_id += 1

            row.append()
            if array_id % 10 == 0:
                session_table.flush()
            session_id += 1

        # Good-bye
        session_table.flush()
        self.out('All done!')
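# The per-session resource pattern above (save each array under a generated
# node name, then record that name in the sessions table) repeats six times;
# a sketch of a helper that factors it out, using the same old-style
# PyTables calls as these examples:
def save_session_array(data_file, group, row, column, array, array_id,
                       title='', id_fmt='data_%06d'):
    # store the array under the next generated id and record the node
    # name in the session row so it can be retrieved with getNode() later
    name = id_fmt % array_id
    data_file.createArray(group, name, array, title=title)
    row[column] = name
    return array_id + 1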
Example #6
    def run_ripple_xcorrs(self, lag=4, numbins=71, ripple_lock='peak'):
        """Compute scan-ripple cross-correlograms
        """
        # Load results data
        data_file = self.get_data_file()
        sessions = data_file.root.sessions
        scan_table = get_node('/behavior', 'scans')

        # Correlogram bins
        edges = np.linspace(-lag, lag, numbins+1)
        centers = (edges[:-1] + edges[1:]) / 2

        # Overall scanning xcorrs
        self.out("Computing ripple-scan cross-correlations...")
        r_ix = dict(start=0, peak=1, end=2)[ripple_lock]
        C = { k: np.zeros(numbins, 'd') for k in ScanPoints }
        C['pstart'] = np.zeros(numbins, 'd')
        C['pend'] = np.zeros(numbins, 'd')

        for session in sessions.iterrows():
            scans = data_file.getNode('/arrays', session['scans'])
            pauses = data_file.getNode('/arrays', session['pauses'])
            ripples = data_file.getNode('/arrays', session['ripples'])
            if (len(scans) and len(ripples)):
                for i, pt in enumerate(ScanPoints):
                    C[pt] += xcorr(scans[:,i], ripples[:,r_ix], maxlag=lag, bins=numbins)[0]
            if (len(pauses) and len(ripples)):
                C['pstart'] += xcorr(pauses[:,0], ripples[:,1], maxlag=lag, bins=numbins)[0]
                C['pend'] += xcorr(pauses[:,1], ripples[:,1], maxlag=lag, bins=numbins)[0]

        f = self.new_figure('xcorrs', 'Scan-Ripple Correlations', (11,12))

        ax = f.add_subplot(321)
        ax.plot(centers, C['downshift'], drawstyle='steps-mid', label='down')
        ax.plot(centers, C['upshift'], drawstyle='steps-mid', label='up')
        ax.legend(loc=0)
        ax.set(xlim=(-lag, lag), xticks=[-lag, -lag/2., 0, lag/2., lag],
            yticks=[])
        ax.set_ylim(bottom=0)
        ax.tick_params(top=False)
        quicktitle(ax, 'Gear Shifting x Ripples')

        ax = f.add_subplot(322)
        ax.plot(centers, C['start'], drawstyle='steps-mid', label='start')
        ax.plot(centers, C['end'], drawstyle='steps-mid', label='end')
        ax.legend(loc=0)
        ax.set(xlim=(-lag, lag), xticks=[-lag, -lag/2., 0, lag/2., lag],
            yticks=[])
        ax.set_ylim(bottom=0)
        ax.tick_params(top=False)
        quicktitle(ax, 'Scans x Ripples')

        ax = f.add_subplot(323)
        ax.plot(centers, C['max'], drawstyle='steps-mid', label='max')
        ax.plot(centers, C['return'], drawstyle='steps-mid', label='return')
        ax.legend(loc=0)
        ax.set(xlim=(-lag, lag), xticks=[-lag, -lag/2., 0, lag/2., lag],
            yticks=[])
        ax.set_ylim(bottom=0)
        ax.tick_params(top=False)
        quicktitle(ax, 'Dwells x Ripples')

        ax = f.add_subplot(324)
        ax.plot(centers, C['pstart'], drawstyle='steps-mid', label='pause start')
        ax.plot(centers, C['pend'], drawstyle='steps-mid', label='pause end')
        ax.legend(loc=0)
        ax.set(xlim=(-lag, lag), xticks=[-lag, -lag/2., 0, lag/2., lag],
            yticks=[])
        ax.set_ylim(bottom=0)
        ax.tick_params(top=False)
        quicktitle(ax, 'Pauses x Ripples')

        # Ripple-event fractions across event and experiment types
        self.out("Computing ripple-event fractions...")
        event_types = ('gearshifts', 'scans', 'pauses')
        expt_types = ('DR', 'NOV')
        frac = np.empty((len(event_types)*len(expt_types),), 'd')
        frac_rat_mu = np.empty_like(frac)
        frac_rat_sem = np.empty_like(frac)
        labels = []
        i = 0
        for expt in expt_types:
            for event in event_types:
                hits = N = 0
                hits_rat = {}
                N_rat = {}

                for session in sessions.where('type=="%s"'%expt):
                    if event in ('gearshifts', 'scans'):
                        events = data_file.getNode('/arrays', session['scans'])
                        if event == 'scans':
                            events = events[:,ScanPhases['scan']] # start -> end
                        elif event == 'gearshifts':
                            events = events[:,ScanPhases['related']] # down -> upshift
                    else:
                        events = data_file.getNode('/arrays', session[event])

                    ripples = data_file.getNode('/arrays', session['ripples'])
                    N_events = events.shape[0]
                    if N_events == 0:
                        continue

                    # Count hits: events containing a ripple peak, where
                    # events are 1) pauses: start->end, 2) scans: start->end,
                    # or 3) gearshifts: downshift->upshift
                    H = 0
                    for v in events:
                        H += int(np.any(np.logical_and(
                            ripples[:,r_ix] >= v[0], ripples[:,r_ix] < v[-1])))

                    N += N_events
                    hits += H

                    if session['rat'] in N_rat:
                        hits_rat[session['rat']] += H
                        N_rat[session['rat']] += N_events
                    else:
                        hits_rat[session['rat']] = H
                        N_rat[session['rat']] = N_events

                labels.append('%s %s'%(expt, event))

                frac[i] = hits / float(N)
                frac_rat = [hits_rat[rat] / float(N_rat[rat])
                    for rat in N_rat]
                frac_rat_mu[i] = np.mean(frac_rat)
                frac_rat_sem[i] = np.std(frac_rat) / np.sqrt(len(N_rat))

                i += 1

        ax = f.add_subplot(325)
        x = np.array([0, 0.5, 1.0, 2, 2.5, 3.0])
        fmt = dict(mfc='w', mec='k', mew=1, ms=6)
        ax.plot(x + 0.075, frac, 'o', label='overall', **fmt)
        ax.errorbar(x - 0.075, frac_rat_mu, yerr=frac_rat_sem, fmt='s',
            ecolor='k', elinewidth=1.5, capsize=5, label='across rats', **fmt)
        ax.set_xticks(x)
        ax.set_xticklabels(labels, size='small', rotation=45)
        ax.set_xlim(-0.5, 3.5)
        ax.set_ylim(bottom=0)
        ax.set_xlabel('Events')
        ax.set_ylabel('Fraction')
        ax.tick_params(top=False, right=False)
        quicktitle(ax, 'Fraction of Events with Ripples', size='small')

        # Fraction of behavior events across session type containing ripples
        self.out("Computing ripple-phase distributions...")
        phase_partition = ('downshift', 'out', 'dwell', 'inb', 'upshift')
        counts = np.zeros(len(phase_partition))
        counts_rat = {}

        for i, phase in enumerate(phase_partition):
            for session in sessions.iterrows():
                ripples = data_file.getNode('/arrays', session['ripples'])
                scans = data_file.getNode('/arrays', session['scans'])

                if not (len(ripples) and len(scans)):
                    continue

                phase_events = scans[:,ScanPhases[phase]]
                hits = np.sum(select_from(ripples[:,r_ix], phase_events))

                counts[i] += hits

                rat = session['rat']
                if rat not in counts_rat:
                    counts_rat[rat] = np.zeros(len(phase_partition))
                counts_rat[rat][i] += hits

        N_rats = len(counts_rat)
        p_phase = np.empty((N_rats, len(phase_partition)), 'd')
        for i, rat in enumerate(counts_rat.keys()):
            p_phase[i] = counts_rat[rat] / counts_rat[rat].sum()

        p_phase_mu = p_phase.mean(axis=0)
        p_phase_sem = p_phase.std(axis=0) / np.sqrt(N_rats)

        ax = f.add_subplot(326)
        x = np.arange(len(phase_partition))
        ax.plot(x + 0.1, counts / counts.sum(), 'o', label='overall', **fmt)
        ax.errorbar(x - 0.1, p_phase_mu, yerr=p_phase_sem, fmt='s',
            ecolor='k', elinewidth=1.5, capsize=5, label='across rats', **fmt)
        ax.set_xticks(x)
        ax.set_xticklabels(('Downshift', 'Outbound', 'Dwell', 'Inbound', 'Upshift'),
            size='small', rotation=45)
        ax.set_xlim(-0.5, len(phase_partition) - 0.5)
        ax.set_ylim(bottom=0)
        ax.set_xlabel('Head-Scan Phase')
        ax.set_ylabel('p[Phase]')
        ax.tick_params(top=False, right=False)
        ax.legend(loc=0)
        quicktitle(ax, 'P[phase|ripple]', size='small')

        plt.ion()
        plt.show()
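# The xcorr() helper is part of the surrounding codebase; a minimal numpy
# sketch of an event-time cross-correlogram consistent with how it is
# called above (counts of lags between two event-time arrays):
import numpy as np

def event_xcorr(a, b, maxlag=4.0, bins=71):
    # histogram every pairwise lag b - a that falls within +/- maxlag
    lags = (np.asarray(b)[np.newaxis] - np.asarray(a)[:, np.newaxis]).ravel()
    lags = lags[np.abs(lags) <= maxlag]
    return np.histogram(lags, bins=bins, range=(-maxlag, maxlag))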
Example #7
    def collect_data(self, area_query='(area=="CA3")|(area=="CA1")'):
        """Collate ripple, theta power across head-scan events
        """
        scan_table = get_node('/behavior', 'scans')
        pause_table = get_node('/behavior', 'pauses')

        tetrode_query = '(%s)&(EEG==True)' % area_query
        dataset_list = TetrodeSelect.datasets(tetrode_query,
                                              allow_ambiguous=True)

        # Tables and iterators
        data_file = self.open_data_file()
        scan_data_table = data_file.createTable('/',
                                                'scan_data',
                                                BehDescr,
                                                title='Scan Data')
        pause_data_table = data_file.createTable('/',
                                                 'pause_data',
                                                 BehDescr,
                                                 title='Pause Data')
        ripple_data_table = data_file.createTable('/',
                                                  'ripple_data',
                                                  RippleDescr,
                                                  title='Ripple Data')
        scan_row = scan_data_table.row
        pause_row = pause_data_table.row
        ripple_row = ripple_data_table.row

        for dataset in dataset_list:
            rat, day = dataset

            # Find the tetrode based on the chosen tetrode strategy
            roi_tt = find_theta_tetrode(dataset, condn=tetrode_query)
            if type(roi_tt) is tuple:
                roi_tt = roi_tt[0]
            self.out('Rat%03d-%02d: using tetrode Sc%d' % (rat, day, roi_tt))

            # Loop through sessions
            for session in get_maze_list(rat, day):
                rds = rat, day, session
                data = SessionData.get(rds)

                ts, EEG = get_eeg_timeseries(rds, roi_tt)
                ripple_list = Ripple.detect(ts, EEG)
                if len(ripple_list):
                    ripple_peaks = np.array(ripple_list)[:, 1]
                else:
                    ripple_peaks = np.array([])

                ts_theta, x_theta = Theta.timeseries(ts, EEG)
                theta_power = Theta.power(x_theta, filtered=True)
                zpow = (theta_power - theta_power.mean()) / theta_power.std()

                # Loop through scans and pauses
                for row, table in [(scan_row, scan_table),
                                   (pause_row, pause_table)]:
                    for rec in table.where(data.session_query):
                        theta = zpow[select_from(ts_theta, [rec['tlim']])]
                        row['id'] = rec['id']
                        row['rat'] = rat
                        row['theta_avg'] = theta.mean()
                        row['theta_max'] = theta.max()
                        row['ripples'] = select_from(ripple_peaks,
                                                     [rec['tlim']]).sum()
                        row.append()
                scan_data_table.flush()
                pause_data_table.flush()

                # Loop through ripples
                zpow_t = interp1d(ts_theta,
                                  zpow,
                                  fill_value=0.0,
                                  bounds_error=False)
                for t_ripple in ripple_peaks:
                    ripple_row['rat'] = rat
                    ripple_row['theta'] = zpow_t(
                        t_ripple)  # interpolate z-power at ripple peak
                    ripple_row['running'] = data.velocity_filter(t_ripple)
                    ripple_row['scan'] = np.any(
                        select_from([t_ripple], data.scan_list))
                    ripple_row['pause'] = np.any(
                        select_from([t_ripple], data.pause_list))
                    ripple_row.append()
                ripple_data_table.flush()

        self.out('All done!')
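# select_from() comes from the surrounding codebase; its use here implies a
# boolean mask marking which times fall inside any (start, end) interval.
# A minimal numpy sketch under that assumption:
import numpy as np

def select_from(times, intervals):
    times = np.asarray(times)
    mask = np.zeros(times.shape, dtype=bool)
    for interval in intervals:
        start, end = interval[0], interval[-1]
        mask |= (times >= start) & (times <= end)
    return mask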
Example #8
    def collect_data(self):
        """Collate theta power and head-scan events across CA1 datasets
        """
        tetrode_query = '(area=="CA1")&(EEG==True)'
        scan_table = get_node('/behavior', 'scans')
        potentiation_table = get_node('/physiology', 'potentiation')
        dataset_list = TetrodeSelect.datasets(tetrode_query,
                                              allow_ambiguous=True)

        psd_kwds = dict(Fs=1001.0,
                        NFFT=2048,
                        noverlap=1024,
                        scale_by_freq=True)
        # psd_kwds = dict(Fs=FullBand.fs, NFFT=256, noverlap=0, scale_by_freq=True)

        scan_psd = {}
        pause_psd = {}
        running_psd = {}

        for rat, day in dataset_list:
            theta_tt, base_theta = find_theta_tetrode((rat, day),
                                                      condn=tetrode_query,
                                                      ambiguous=True)
            self.out('Rat%03d-%02d: using tetrode Sc%d' % (rat, day, theta_tt))

            lfp = np.array([])
            scan_lfp = np.array([])
            pause_lfp = np.array([])
            running_lfp = np.array([])

            for session in get_maze_list(rat, day):
                self.out('Adding data from session %d...' % session)
                rds = rat, day, session
                data = SessionData.get(rds, load_clusters=False)

                ts, EEG = get_eeg_timeseries(rds, theta_tt)
                ts_full, x_full = ts, EEG  #FullBand._downsample(ts), FullBand._decimate(EEG)

                running_ix = data.filter_tracking_data(ts_full,
                                                       boolean_index=True,
                                                       **data.running_filter())

                lfp = np.r_[lfp, x_full]
                scan_lfp = np.r_[scan_lfp,
                                 x_full[select_from(ts_full, data.scan_list)]]
                pause_lfp = np.r_[
                    pause_lfp, x_full[select_from(ts_full, data.pause_list)]]
                running_lfp = np.r_[running_lfp, x_full[running_ix]]

            self.out('Computing and normalizing spectra...')
            Pxx, freqs = psd(lfp, **psd_kwds)
            Pxx_scan = np.squeeze(psd(scan_lfp, **psd_kwds)[0])
            Pxx_pause = np.squeeze(psd(pause_lfp, **psd_kwds)[0])
            Pxx_running = np.squeeze(psd(running_lfp, **psd_kwds)[0])
            if 'freqs' not in self.results:
                self.results['freqs'] = freqs

            full_power = np.trapz(Pxx, x=freqs)
            for P in Pxx_scan, Pxx_pause, Pxx_running:
                P /= full_power

            if rat in scan_psd:
                scan_psd[rat] = np.vstack((scan_psd[rat], Pxx_scan))
                pause_psd[rat] = np.vstack((pause_psd[rat], Pxx_pause))
                running_psd[rat] = np.vstack((running_psd[rat], Pxx_running))
            else:
                scan_psd[rat] = Pxx_scan[np.newaxis]
                pause_psd[rat] = Pxx_pause[np.newaxis]
                running_psd[rat] = Pxx_running[np.newaxis]

        rat_list = sorted(scan_psd.keys())
        self.out('Averaging spectra for %d rats...' % len(rat_list))

        scan_spectra = np.empty((len(rat_list), len(freqs)), 'd')
        pause_spectra = np.empty_like(scan_spectra)
        running_spectra = np.empty_like(scan_spectra)
        for i, rat in enumerate(rat_list):
            scan_spectra[i] = scan_psd[rat].mean(axis=0)
            pause_spectra[i] = pause_psd[rat].mean(axis=0)
            running_spectra[i] = running_psd[rat].mean(axis=0)

        self.results['rat_list'] = np.array(rat_list)
        self.results['scan_psd'] = scan_spectra
        self.results['pause_psd'] = pause_spectra
        self.results['running_psd'] = running_spectra

        self.out('All done!')
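# psd() above matches the matplotlib.mlab.psd signature (Fs, NFFT, noverlap,
# scale_by_freq; returns Pxx, freqs). A sketch of the normalization step,
# which divides each behavioral state's spectrum by the session's total
# broadband power:
import numpy as np
from matplotlib import mlab

def normalized_state_psd(x_state, x_all, Fs=1001.0, NFFT=2048, noverlap=1024):
    Pxx_all, freqs = mlab.psd(x_all, NFFT=NFFT, Fs=Fs, noverlap=noverlap,
                              scale_by_freq=True)
    Pxx_state = mlab.psd(x_state, NFFT=NFFT, Fs=Fs, noverlap=noverlap,
                         scale_by_freq=True)[0]
    full_power = np.trapz(np.squeeze(Pxx_all), x=freqs)  # broadband power
    return np.squeeze(Pxx_state) / full_power, freqs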
Example #9
    def generate_signal_xcorrs(self, lag=3):

        scan_table = get_node('/behavior', 'scans')

        data_file = self.get_data_file(mode='a')
        session_table = data_file.root.sessions
        signals = self.results['signals']
        scan_points = self.results['scan_points']

        lag_samples = int(lag * Theta.fs)

        def compute_xcorr_averages(signal):
            rat_slices = {k: {} for k in scan_points}
            for session in session_table.iterrows():
                rat = session['rat']
                session_query = '(rat==%(rat)d)&(day==%(day)d)&(session==%(session)d)' % session

                t_theta = data_file.getNode('/arrays',
                                            session['t_theta']).read()
                x_theta = data_file.getNode('/arrays', session[signal]).read()

                # matrix of session timing of scan points (rows) for each scan (columns)
                t_event_points = stamp_to_time(np.array([
                    tuple(scan[k] for k in scan_points)
                    for scan in scan_table.where(session_query)
                ]),
                                               zero_stamp=session['start']).T
                if t_event_points.size == 0:
                    continue

                for i, point in enumerate(scan_points):
                    t_point_scans = t_event_points[i]

                    for t_scan in t_point_scans:
                        scan_ix = np.argmin(np.abs(t_theta - t_scan))
                        start_ix, end_ix = scan_ix - lag_samples, scan_ix + lag_samples
                        if start_ix < 0 or end_ix > x_theta.size:
                            continue

                        signal_slice = x_theta[start_ix:end_ix + 1]

                        if rat in rat_slices[point]:
                            if signal_slice.size != rat_slices[point][
                                    rat].shape[1]:
                                continue
                            rat_slices[point][rat] = np.vstack(
                                (rat_slices[point][rat], signal_slice))
                        else:
                            rat_slices[point][rat] = signal_slice[np.newaxis]
                self.out.printf('.', color='lightgreen')
            self.out.printf('\n')

            def compute_rat_averages():
                mu = {}
                for point in scan_points:
                    N_rats = len(rat_slices[point].keys())
                    averages = []
                    for rat in rat_slices[point].keys():
                        averages.append(rat_slices[point][rat].mean(axis=0))
                    mu[point] = np.array(averages)
                return mu

            return compute_rat_averages()

        if hasattr(data_file.root, 'xcorr_data'):
            data_file.removeNode(data_file.root, 'xcorr_data', recursive=True)
        xcorr_data_group = data_file.createGroup(
            '/',
            'xcorr_data',
            title='Rat Averages for Continuous Signal Cross-Correlations')

        for signal in signals:
            self.out('Generating %s xcorr data...' % signal)
            averages = compute_xcorr_averages(signal)
            for point in scan_points:
                data_file.createArray(xcorr_data_group,
                                      self._get_xcorr_array_name(
                                          signal, point),
                                      averages[point],
                                      title='Rat Averages for %s and Scan %s' %
                                      (signal, point.title()))
            data_file.flush()

        xcorr_data_group._v_attrs['lag'] = float(lag)
        self.close_data_file()
Example #10
    def collect_data(self,
                     test='place',
                     place_field='pass',
                     min_quality='fair',
                     allow_ambiguous=True):
        """Tally place fields across areas

        Keyword arguments similar to info_scores.InfoScoreData. Remaining
        keywords are passed to TetrodeSelect.
        """
        # Metadata for determining valid fields
        self.results['test'] = test
        self.results['place_field'] = place_field
        self.results['min_quality'] = min_quality
        self.results['allow_ambiguous'] = allow_ambiguous
        if place_field == 'all':
            self.test = 'place'

        # Construct place cell selection criteria based on keyword arguments
        if test == 'place':
            SpatialTest = SpatialInformationCriteria
        elif test == 'skaggs':
            SpatialTest = SkaggsCriteria
        elif test == 'olypher':
            SpatialTest = OlypherCriteria
        else:
            raise ValueError('bad test value: %s' % test)
        MinQuality = get_min_quality_criterion(min_quality)
        CellCriteria = AND(PrincipalCellCriteria, SpikeCountCriteria,
                           MinQuality)
        if place_field == 'pass':
            CellCriteria = AND(CellCriteria, SpatialTest)
        elif place_field == 'fail':
            CellCriteria = AND(CellCriteria, NOT(SpatialTest))
        elif place_field != 'all':
            raise ValueError('bad place_field value: %s' % place_field)

        # Walk the tree and count place fields
        N = {}
        N_cells = {}
        N_sessions = {}
        sessions = set()
        tetrodes = get_node('/metadata', 'tetrodes')
        for area in AREAS.keys():
            for subdiv in (['all'] + AREAS[area]):
                self.out('Walking datasets for %s %s...' % (area, subdiv))
                key = '%s_%s' % (area, subdiv)
                N[key] = 0
                N_cells[key] = 0
                N_sessions[key] = 0

                area_query = 'area=="%s"' % area
                if subdiv != 'all':
                    area_query = '(%s)&(subdiv=="%s")' % (area_query, subdiv)

                for dataset in TetrodeSelect.datasets(
                        area_query, allow_ambiguous=allow_ambiguous):
                    Criteria = AND(
                        CellCriteria,
                        TetrodeSelect.criterion(
                            dataset,
                            area_query,
                            allow_ambiguous=allow_ambiguous))
                    dataset_cells = set()

                    for maze in get_maze_list(*dataset):
                        rds = dataset + (maze, )
                        data = SessionData.get(rds)
                        sessions.add(rds)
                        place_cell_clusters = data.get_clusters(
                            request=Criteria)
                        N[key] += len(place_cell_clusters)
                        dataset_cells.update(place_cell_clusters)
                        N_sessions[key] += 1

                    N_cells[key] += len(dataset_cells)

        self.out.timestamp = False
        self.results['N'] = N
        self.out('Total number of sessions = %d' % len(sessions))
        for key in sorted(N.keys()):
            self.out('N_cells[%s] = %d cells' % (key, N_cells[key]))
            self.out('N_sessions[%s] = %d sessions' % (key, N_sessions[key]))
            self.out('N_cell_sessions[%s] = %d cell-sessions' % (key, N[key]))

        # Good-bye
        self.out('All done!')
Example #11
    def collect_data(self, dataset=(57, 1), lag=0.25):
        """Detect all ripples in dataset and plot EEG, ripple-band, and power
        signals along with detected event boundaries
        """
        ripple_table = get_node('/physiology', 'ripples')
        tetrode_query = '(area=="CA1")&(EEG==True)'
        dataset_query = '(rat==%d)&(day==%d)' % dataset
        pyr_tetrodes = find_pyramidale_tetrodes(dataset, verbose=False)
        rat, day = dataset

        # Initialize accumulators
        time_slices = []
        EEG_slices = []
        power_slices = []
        events = []
        timestamps = []

        Ripple = RippleFilter()

        # Loop through sessions, detecting and storing ripple slices
        for rds in unique_sessions(ripple_table, condn=dataset_query):
            data = SessionData.get(rds)

            self.out('Loading data for rat%03d-%02d-m%d...' % rds)
            ts = None
            EEG = None
            P = None
            for tt in pyr_tetrodes:
                X = get_eeg_timeseries(rds, tt)
                if X is None:
                    continue
                if ts is None:
                    ts = X[0]
                if EEG is None:
                    EEG = X[1]
                else:
                    EEG = np.vstack((EEG, X[1]))
                if P is None:
                    P = Ripple.power(X[1])
                else:
                    P = np.vstack((P, Ripple.power(X[1])))

            if P.ndim == 2:
                P = np.mean(P, axis=0)

            ts_ripples = [(rec['start'], rec['peak'], rec['end'])
                          for rec in ripple_table.where(data.session_query)]
            t = data.T_(ts)

            for timing in ts_ripples:
                start, peak, end = data.T_(timing)
                chunk = time_slice(t, peak - lag, peak + lag)
                time_slices.append(t[chunk] - peak)
                EEG_slices.append(EEG[..., chunk])
                power_slices.append(P[chunk])
                events.append((start - peak, end - peak))
                timestamps.append(timing[1])

        self.out('Plotting EEG traces of ripple events...')
        LW = 0.4
        norm = lambda x: x.astype('d') / float(CLIP)  # CLIP is a module-level constant
        for i, ax in self.get_plot(range(len(time_slices))):
            t_chunk = time_slices[i]
            traces = EEG_slices[i]
            if traces.ndim == 1:
                ax.plot(t_chunk,
                        norm(traces),
                        'k-',
                        lw=1.5 * LW,
                        alpha=1,
                        zorder=0)
            else:
                ax.plot(t_chunk,
                        norm(traces).T,
                        'k-',
                        lw=LW,
                        alpha=0.5,
                        zorder=-1)
                ax.plot(t_chunk,
                        norm(np.mean(traces, axis=0)),
                        'k-',
                        lw=LW,
                        alpha=1,
                        zorder=0)
            ax.plot(t_chunk,
                    power_slices[i] / power_slices[i].max(),
                    'b-',
                    lw=1.5 * LW,
                    alpha=1,
                    zorder=1)
            ax.axhline(0, ls='-', c='k', lw=LW, zorder=0)
            ax.axvline(events[i][0], ls='-', c='k', lw=LW, zorder=2)
            ax.axvline(0, ls=':', c='k', lw=LW, zorder=2, alpha=0.5)
            ax.axvline(events[i][1], ls='-', c='k', lw=LW, zorder=2)
            ax.set_xlim(-lag, lag)
            ax.set_ylim(-1, 1)
            ax.set_axis_off()
            quicktitle(ax, '%d' % timestamps[i], size='xx-small')

            self.out.printf('.')
        self.out.printf('\n')
Example #12
    def create_outcome_table(self,
                             event_table='potentiation',
                             half_windows=HALF_WINDOWS_DEFAULT):
        outcome_table_description = dict(scan_cell_id=tb.UInt32Col(pos=1))
        window_cols = map(lambda h: 'window_%d' % h, half_windows)

        def add_halfwindow_column_descriptors(descr):
            pos = 2
            for col in window_cols:
                descr[col] = tb.BoolCol(pos=pos)
                pos += 1

        add_halfwindow_column_descriptors(outcome_table_description)
        data_file = self.get_data_file(mode='a')
        outcome_table = create_table(data_file,
                                     '/',
                                     'hit_outcome',
                                     outcome_table_description,
                                     title='Place-Field Event (Hit) Outcomes')
        row = outcome_table.row

        scan_cell_table = data_file.root.scan_cell_info
        scans = get_node('/behavior', 'scans')
        potentiation = get_node('/physiology', event_table)
        outcome_table._v_attrs['event_table'] = event_table

        # Adjust the hit angle for the depotentiation hack, in which the
        # "event" is actually the last active traversal
        hit_angle = -360.0  # forward one lap
        if event_table == 'depotentiation':
            hit_angle = 0.0  # same lap

        def precache_sessions():
            [
                SessionData.get(rds, load_clusters=False)
                for rds in unique_sessions(scan_cell_table)
            ]

        precache_sessions()

        @memoize
        def cell_query(session, tc):
            return '(%s)&(tc=="%s")' % (session.session_query, tc)

        def print_test_indicator(result):
            color = result and 'green' or 'red'
            self.out.printf(u'\u25a1', color=color)

        def test_for_event_hit(scan_angle, h, bounds):
            window = tuple(scan_angle + hit_angle + np.array([h, -h]))
            start_before = bounds[0] >= window[1]
            end_after = bounds[1] <= window[0]
            return start_before and end_after

        for pair in scan_cell_table.iterrows():
            self.out.printf('|', color='cyan')

            rds = pair['rat'], pair['day'], pair['session']
            session = SessionData.get(rds, load_clusters=False)
            event_bounds = lambda t: tuple(
                session.F_('alpha_unwrapped')(session.T_(t)))

            scan_angle = pair['angle']
            row['scan_cell_id'] = pair['id']

            for event in potentiation.where(cell_query(session, pair['tc'])):
                self.out.printf('|', color='lightgray')
                bounds = event_bounds(event['tlim'])

                for h, col in zip(half_windows, window_cols):
                    row[col] = test_for_event_hit(scan_angle, h, bounds)
                    print_test_indicator(row[col])

            row.append()
            if pair['id'] % 100 == 0:
                outcome_table.flush()
                self.out.printf(' [flush]\n%d / %d scan-cell pairs\n' %
                                (outcome_table.nrows, scan_cell_table.nrows),
                                color='lightgray')

        self.out.printf('\n')
        self.close_data_file()
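# A worked numeric check of test_for_event_hit above, using hypothetical
# values (unwrapped track angle decreases by 360 per forward lap, per the
# hit_angle comment, so the window sits one lap ahead of the scan):
import numpy as np

scan_angle, hit_angle, h = -400.0, -360.0, 45.0
window = tuple(scan_angle + hit_angle + np.array([h, -h]))  # (-715.0, -805.0)
bounds = (-760.0, -780.0)  # hypothetical event bounds one lap after the scan
assert bounds[0] >= window[1] and bounds[1] <= window[0]  # counts as a hit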
Example #13
    def generate_index_validation_set(self, N=100):
        data_file = self.get_data_file(mode='r')
        scan_cell_table = data_file.root.scan_cell_info
        scan_table = get_node('/behavior', 'scans')
        index = ScanFiringDeviationIndex()

        def get_subdir(name):
            subdir = os.path.join(self.datadir, name)
            if not os.path.isdir(subdir):
                os.makedirs(subdir)
            return subdir

        output_dir = get_subdir('index_validation')
        figure_dir = get_subdir(os.path.join('index_validation', 'figures'))

        spreadsheet = DataSpreadsheet(
            os.path.join(output_dir, 'scan_index_sample.csv'),
            [('sample', 'd'), ('rat', 'd'), ('day', 'd'), ('session', 'd'),
             ('cell', 's'), ('scan_number', 'd'), ('scan_start', 'd'),
             ('surprise', 'f')])
        rec = spreadsheet.get_record()

        sample_ix = np.random.permutation(scan_cell_table.nrows)[:N]
        scan_index = scan_cell_table.col('S')[sample_ix]
        sample_ix = sample_ix[np.argsort(scan_index)]

        plt.ioff()
        figure_files = []
        sample_id = 1

        self.out('Generating sample records and figures...')
        for ix in sample_ix:
            self.out.printf('.')

            row = scan_cell_table[ix]
            rds = row['rat'], row['day'], row['session']
            scan = scan_table[row['scan_id']]

            rec['sample'] = sample_id
            rec['rat'], rec['day'], rec['session'] = rds
            rec['cell'] = row['tc']
            rec['scan_number'] = scan['number']
            rec['scan_start'] = scan['start']
            rec['surprise'] = row['S']
            spreadsheet.write_record(rec)

            session = SessionData.get(rds)
            cluster = session.cluster_data(row['tc'])
            index.compute(session, scan, cluster, plot=True)
            figure_fn = os.path.join(figure_dir,
                                     'distros_%03d.pdf' % sample_id)
            figure_files.append(figure_fn)
            plt.savefig(figure_fn)
            plt.close()

            sample_id += 1

        self.out.printf('\n')
        spreadsheet.close()
        plt.ion()

        def concatenate_distro_figures_into_report():
            pdftk = '/opt/local/bin/pdftk'
            if os.path.exists(pdftk):
                distro_report = os.path.join(output_dir, 'distro_figures.pdf')
                retcode = subprocess.call([pdftk] + figure_files +
                                          ['cat', 'output', distro_report])
                if retcode == 0:
                    self.out('Saved distribution figures to:\n%s' %
                             distro_report)
                else:
                    self.out('Error saving figure report.', error=True)

        concatenate_distro_figures_into_report()
Example #14
    def create_scan_cell_table(self, scan_phase='scan'):
        """For every scan–cell pair, compute the relative index of cell firing that
        occurred during the scan and previous cell firing on the track
        """
        scan_table_description = {
            'id': tb.UInt32Col(pos=1),
            'scan_id': tb.UInt16Col(pos=2),
            'rat': tb.UInt16Col(pos=3),
            'day': tb.UInt16Col(pos=4),
            'session': tb.UInt16Col(pos=5),
            'session_start_angle': tb.FloatCol(pos=6),
            'session_end_angle': tb.FloatCol(pos=7),
            'tc': tb.StringCol(itemsize=8, pos=8),
            'type': tb.StringCol(itemsize=4, pos=9),
            'expt_type': tb.StringCol(itemsize=4, pos=10),
            'area': tb.StringCol(itemsize=4, pos=11),
            'subdiv': tb.StringCol(itemsize=4, pos=12),
            'duration': tb.FloatCol(pos=13),
            'magnitude': tb.FloatCol(pos=14),
            'angle': tb.FloatCol(pos=15)
        }

        def add_scan_index_column_descriptors(descr):
            pos = 16
            for name in ScanIndex.AllNames:
                descr[name] = tb.FloatCol(pos=pos)
                pos += 1

        add_scan_index_column_descriptors(scan_table_description)

        data_file = self.get_data_file(mode='a')
        scan_cell_table = create_table(data_file,
                                       '/',
                                       'scan_cell_info',
                                       scan_table_description,
                                       title='Metadata for Scan-Cell Pairs')
        scan_cell_table._v_attrs['scan_phase'] = scan_phase
        row = scan_cell_table.row
        row_id = 0

        scans_table = get_node('/behavior', 'scans')
        sessions_table = get_node('/metadata', 'sessions')
        tetrodes_table = get_node('/metadata', 'tetrodes')

        cornu_ammonis_query = '(area=="CA1")|(area=="CA3")'
        hippocampal_datasets = unique_datasets('/metadata',
                                               'tetrodes',
                                               condn=cornu_ammonis_query)

        quality_place_cells = AND(get_min_quality_criterion(self.min_quality),
                                  PlaceCellCriteria)

        index = ScanIndex(scan_phase=scan_phase)

        for dataset in hippocampal_datasets:
            dataset_query = '(rat==%d)&(day==%d)' % dataset

            hippocampal_tetrodes = unique_values(
                tetrodes_table,
                column='tt',
                condn='(%s)&(%s)' % (dataset_query, cornu_ammonis_query))
            cluster_criteria = AND(
                quality_place_cells,
                get_tetrode_restriction_criterion(hippocampal_tetrodes))

            for maze in get_maze_list(*dataset):
                rds = dataset + (maze, )
                session = SessionData(rds=rds)
                place_cells = session.get_clusters(cluster_criteria)
                session_start_angle = np.median(
                    session.trajectory.alpha_unwrapped[:5])
                session_end_angle = np.median(
                    session.trajectory.alpha_unwrapped[-5:])

                self.out('Computing scan index for %s...' %
                         session.data_group._v_pathname)

                for scan in scans_table.where(session.session_query):
                    self.out.printf('|', color='cyan')

                    for cell in place_cells:
                        cluster = session.cluster_data(cell)

                        tt, cl = parse_cell_name(cluster.name)
                        tetrode = get_unique_row(
                            tetrodes_table, '(rat==%d)&(day==%d)&(tt==%d)' %
                            (rds[0], rds[1], tt))

                        row['id'] = row_id
                        row['scan_id'] = scan['id']
                        row['rat'], row['day'], row['session'] = rds
                        row['session_start_angle'] = session_start_angle
                        row['session_end_angle'] = session_end_angle
                        row['tc'] = cluster.name
                        row['type'] = session.attrs['type']
                        row['expt_type'] = get_unique_row(
                            sessions_table, session.session_query)['expt_type']
                        row['area'] = tetrode['area']
                        row['subdiv'] = tetrode['area'] + tetrode['subdiv'][:1]
                        row['angle'] = session.F_('alpha_unwrapped')(
                            session.T_(scan['start']))
                        row['duration'] = scan['duration']
                        row['magnitude'] = scan['magnitude']

                        for index_name in ScanIndex.AllNames:
                            row[index_name] = index.compute(
                                index_name, session, scan, cluster)

                        self.out.printf('.', color='green')
                        row_id += 1
                        row.append()

                        if row_id % 100 == 0:
                            scan_cell_table.flush()

                self.out.printf('\n')

        scan_cell_table.flush()
        self.out('Finished creating %s.' % scan_cell_table._v_pathname)