def collect_data(self, area_query='(area=="CA3")|(area=="CA1")'):
    """Collate ripple and theta-power data across head-scan events."""
    scan_table = get_node('/behavior', 'scans')
    pause_table = get_node('/behavior', 'pauses')
    tetrode_query = '(%s)&(EEG==True)' % area_query
    dataset_list = TetrodeSelect.datasets(tetrode_query,
                                          allow_ambiguous=True)

    # Output tables and row iterators
    data_file = self.open_data_file()
    scan_data_table = data_file.createTable('/', 'scan_data', BehDescr,
                                            title='Scan Data')
    pause_data_table = data_file.createTable('/', 'pause_data', BehDescr,
                                             title='Pause Data')
    ripple_data_table = data_file.createTable('/', 'ripple_data', RippleDescr,
                                              title='Ripple Data')
    scan_row = scan_data_table.row
    pause_row = pause_data_table.row
    ripple_row = ripple_data_table.row

    for dataset in dataset_list:
        rat, day = dataset

        # Find the tetrode based on the chosen tetrode strategy
        roi_tt = find_theta_tetrode(dataset, condn=tetrode_query)
        if type(roi_tt) is tuple:
            roi_tt = roi_tt[0]
        self.out('Rat%03d-%02d: using tetrode Sc%d' % (rat, day, roi_tt))

        # Loop through sessions
        for session in get_maze_list(rat, day):
            rds = rat, day, session
            data = SessionData.get(rds)
            ts, EEG = get_eeg_timeseries(rds, roi_tt)

            # Detect ripple events and pull out their peak timestamps
            ripple_list = Ripple.detect(ts, EEG)
            if len(ripple_list):
                ripple_peaks = np.array(ripple_list)[:, 1]
            else:
                ripple_peaks = np.array([])

            # Z-score theta-band power across the session
            ts_theta, x_theta = Theta.timeseries(ts, EEG)
            theta_power = Theta.power(x_theta, filtered=True)
            zpow = (theta_power - theta_power.mean()) / theta_power.std()

            # Loop through scans and pauses, tallying theta power and
            # ripple counts within each event's time limits
            for row, table in [(scan_row, scan_table),
                               (pause_row, pause_table)]:
                for rec in table.where(data.session_query):
                    theta = zpow[select_from(ts_theta, [rec['tlim']])]
                    row['id'] = rec['id']
                    row['rat'] = rat
                    row['theta_avg'] = theta.mean()
                    row['theta_max'] = theta.max()
                    row['ripples'] = select_from(ripple_peaks,
                                                 [rec['tlim']]).sum()
                    row.append()
            scan_data_table.flush()
            pause_data_table.flush()

            # Loop through ripples, interpolating z-power at each peak
            zpow_t = interp1d(ts_theta, zpow, fill_value=0.0,
                              bounds_error=False)
            for t_ripple in ripple_peaks:
                ripple_row['rat'] = rat
                ripple_row['theta'] = zpow_t(t_ripple)
                ripple_row['running'] = data.velocity_filter(t_ripple)
                ripple_row['scan'] = np.any(
                    select_from([t_ripple], data.scan_list))
                ripple_row['pause'] = np.any(
                    select_from([t_ripple], data.pause_list))
                ripple_row.append()
            ripple_data_table.flush()

    self.out('All done!')
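
# A minimal sketch of the interval-selection helper used throughout this
# module, assuming `select_from(t, intervals)` returns a boolean index that
# marks which timestamps in `t` fall within any (start, end) interval pair.
# This is an illustrative reimplementation, not the project's own helper.
def _select_from_sketch(t, intervals):
    import numpy as np
    t = np.asarray(t)
    index = np.zeros(t.shape, dtype=bool)
    for start, end in intervals:
        index |= (t >= start) & (t <= end)
    return index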
def run(self, test='place', place_field='pass', min_quality='fair', **kwds):
    """Compute I_pos and I_spike across all criterion place cells in CA3/CA1.

    Keyword arguments:
    place_field -- 'pass', 'fail', or 'all' to restrict responses based on
        place field criterion test results
    test -- 'place', 'skaggs', or 'olypher' to use either the full place
        field test or one of the component tests for the cell filtering
    min_quality -- isolation quality threshold for filtering cells

    Remaining keywords are passed to TetrodeSelect.

    Returns (I_pos, I_spike) tuple of arrays for selected cell clusters.
    """
    self.out = CPrint(prefix='ScatterInfo')
    area_query = '(area=="CA3")|(area=="CA1")'

    # Metadata for the plot title
    self.place_field = place_field
    self.test = test
    self.quality = min_quality
    if place_field == 'all':
        self.test = 'place'

    # Select the spatial-information test used for cell filtering
    if test == 'place':
        SpatialTest = SpatialInformationCriteria
    elif test == 'skaggs':
        SpatialTest = SkaggsCriteria
    elif test == 'olypher':
        SpatialTest = OlypherCriteria
    else:
        raise ValueError('bad test value: %s' % test)

    MinQuality = get_min_quality_criterion(min_quality)
    CellCriteria = AND(PrincipalCellCriteria, SpikeCountCriteria, MinQuality)
    if place_field == 'pass':
        CellCriteria = AND(CellCriteria, SpatialTest)
    elif place_field == 'fail':
        CellCriteria = AND(CellCriteria, NOT(SpatialTest))
    elif place_field != 'all':
        raise ValueError('bad place_field value: %s' % place_field)

    # Accumulate (I_pos, I_spike) pairs across all criterion clusters
    I = []
    for dataset in TetrodeSelect.datasets(area_query):
        rat, day = dataset
        Criteria = AND(CellCriteria,
                       TetrodeSelect.criterion(dataset, area_query, **kwds))
        for maze in get_maze_list(*dataset):
            data = SessionData.get((rat, day, maze))
            for tc in data.get_clusters(request=Criteria):
                cluster = data.cluster_data(tc)
                I.append((cluster.I_pos, cluster.I_spike))

    self.I = I = np.array(I).T
    self.out('%d cell-sessions counted.' % I.shape[1])
    return I[0], I[1]
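
# A minimal sketch of how the AND/NOT criteria combinators used above could
# compose cluster predicates, assuming each criterion is a callable mapping
# cluster data to a boolean. The project's actual combinators may differ in
# interface; this is shown only to illustrate the composition pattern.
def _AND_sketch(*criteria):
    return lambda cluster: all(c(cluster) for c in criteria)

def _NOT_sketch(criterion):
    return lambda cluster: not criterion(cluster)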
def collect_data(self, area='CA1'):
    """Collect per-session theta-band signals and behavioral velocities
    for scan cross-correlation analysis.
    """
    # Table description: each row points to the per-session signal arrays
    # by their array-node names under /arrays
    SessionDescr = {
        'id': tb.UInt16Col(pos=1),
        'rat': tb.UInt16Col(pos=2),
        'day': tb.UInt16Col(pos=3),
        'session': tb.UInt16Col(pos=4),
        'start': tb.UInt64Col(pos=5),
        'type': tb.StringCol(itemsize=4, pos=6),
        't_theta': tb.StringCol(itemsize=16, pos=7),
        'P_theta': tb.StringCol(itemsize=16, pos=8),
        'f_theta': tb.StringCol(itemsize=16, pos=9),
        'speed': tb.StringCol(itemsize=16, pos=10),
        'radial_velocity': tb.StringCol(itemsize=16, pos=11),
        'hd_velocity': tb.StringCol(itemsize=16, pos=12)
    }

    def get_area_query(area):
        if area == "CAX":
            return '(area=="CA1")|(area=="CA3")'
        return 'area=="%s"' % area

    tetrode_query = '(%s)&(EEG==True)' % get_area_query(area)
    self.out('Using tetrode query: %s' % tetrode_query)

    self.results['scan_points'] = ('start', 'max', 'return', 'end')
    dataset_list = TetrodeSelect.datasets(tetrode_query)

    def get_dataset_sessions():
        sessions = []
        for dataset in dataset_list:
            for maze in get_maze_list(*dataset):
                sessions.append(dataset + (maze,))
        return sessions

    session_list = get_dataset_sessions()

    self.results['rats'] = rat_list = sorted(set(d[0] for d in dataset_list))
    self.results['N_rats'] = len(rat_list)

    data_file = self.open_data_file()
    array_group = data_file.createGroup('/', 'arrays',
                                        title='Scan and Signal Arrays')
    session_table = data_file.createTable(
        '/', 'sessions', SessionDescr,
        'Sessions for Scan Cross-Correlation Analysis')

    id_fmt = 'data_%06d'
    array_id = 0
    session_id = 0
    row = session_table.row
    remove = []

    for rds in session_list:
        rds_str = 'rat%d-%02d-m%d' % rds
        data = SessionData.get(rds)

        # Skip sessions without a valid theta tetrode
        theta_tt = find_theta_tetrode(rds[:2], condn=tetrode_query)
        if theta_tt is None:
            remove.append(rds)
            continue
        theta_tt = theta_tt[0]

        row['id'] = session_id
        row['rat'], row['day'], row['session'] = rds
        row['type'] = ('DR' if data.attrs['type'] in ('STD', 'MIS')
                       else 'NOV')
        row['start'] = data.start

        # Skip sessions without EEG data
        EEG = get_eeg_timeseries(rds, theta_tt)
        if EEG is None:
            remove.append(rds)
            continue

        # Theta-band signals and behavior interpolated to the theta time base
        ts_theta, x_theta = Theta.timeseries(*EEG)
        t_theta = data.T_(ts_theta)
        P_theta = zscore(Theta.power(x_theta, filtered=True))
        f_theta = Theta.frequency(x_theta, filtered=True)
        speed = data.F_('speed')(t_theta)
        radial_velocity = np.abs(data.F_('radial_velocity')(t_theta))
        hd_velocity = np.abs(data.F_('hd_velocity')(t_theta))

        session_signals = [('t_theta', t_theta), ('P_theta', P_theta),
                           ('f_theta', f_theta), ('speed', speed),
                           ('radial_velocity', radial_velocity),
                           ('hd_velocity', hd_velocity)]

        # Store each signal as an array node, recording its name in the row
        for k, d in session_signals:
            data_file.createArray(array_group, id_fmt % array_id, d,
                                  title='%s : %s' % (rds_str, k))
            row[k] = id_fmt % array_id
            array_id += 1

        self.out('Saved data from %s.' % rds_str)
        row.append()
        if array_id % 10 == 0:
            session_table.flush()
        session_id += 1

    # Drop sessions that were missing a theta tetrode or EEG data
    for rds in remove:
        session_list.remove(rds)

    self.results['sessions'] = session_list
    self.results['N_sessions'] = len(session_list)
    self.results['signals'] = ('P_theta', 'f_theta', 'speed',
                               'radial_velocity', 'hd_velocity')

    session_table.flush()
    self.close_data_file()
    self.out('All done!')
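
# Sketch: reading back the per-session signal arrays saved above. Each
# session row stores array-node names (e.g., 'data_000042') in its string
# columns; with the PyTables 2.x API used in this module, those names can
# be dereferenced under /arrays via getNode(). The helper name and its
# exact usage are illustrative assumptions, not part of this codebase.
def _load_session_signals_sketch(data_file, session_row, signals):
    # Returns {signal_name: ndarray} for one row of the sessions table
    return {k: data_file.getNode('/arrays', session_row[k]).read()
            for k in signals}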
def collect_data(self): """Collate theta power and head-scan events across CA1 datasets """ tetrode_query = '(area=="CA1")&(EEG==True)' scan_table = get_node('/behavior', 'scans') potentiation_table = get_node('/physiology', 'potentiation') dataset_list = TetrodeSelect.datasets(tetrode_query, allow_ambiguous=True) psd_kwds = dict(Fs=1001.0, NFFT=2048, noverlap=1024, scale_by_freq=True) # psd_kwds = dict(Fs=FullBand.fs, NFFT=256, noverlap=0, scale_by_freq=True) scan_psd = {} pause_psd = {} running_psd = {} for rat, day in dataset_list: theta_tt, base_theta = find_theta_tetrode((rat, day), condn=tetrode_query, ambiguous=True) self.out('Rat%03d-%02d: using tetrode Sc%d' % (rat, day, theta_tt)) lfp = np.array([]) scan_lfp = np.array([]) pause_lfp = np.array([]) running_lfp = np.array([]) for session in get_maze_list(rat, day): self.out('Adding data from session %d...' % session) rds = rat, day, session data = SessionData.get(rds, load_clusters=False) ts, EEG = get_eeg_timeseries(rds, theta_tt) ts_full, x_full = ts, EEG #FullBand._downsample(ts), FullBand._decimate(EEG) running_ix = data.filter_tracking_data(ts_full, boolean_index=True, **data.running_filter()) lfp = np.r_[lfp, x_full] scan_lfp = np.r_[scan_lfp, x_full[select_from(ts_full, data.scan_list)]] pause_lfp = np.r_[ pause_lfp, x_full[select_from(ts_full, data.pause_list)]] running_lfp = np.r_[running_lfp, x_full[running_ix]] self.out('Computing and normalizing spectra...') Pxx, freqs = psd(lfp, **psd_kwds) Pxx_scan = np.squeeze(psd(scan_lfp, **psd_kwds)[0]) Pxx_pause = np.squeeze(psd(pause_lfp, **psd_kwds)[0]) Pxx_running = np.squeeze(psd(running_lfp, **psd_kwds)[0]) if 'freqs' not in self.results: self.results['freqs'] = freqs full_power = np.trapz(Pxx, x=freqs) for P in Pxx_scan, Pxx_pause, Pxx_running: P /= full_power if rat in scan_psd: scan_psd[rat] = np.vstack((scan_psd[rat], Pxx_scan)) pause_psd[rat] = np.vstack((pause_psd[rat], Pxx_pause)) running_psd[rat] = np.vstack((running_psd[rat], Pxx_running)) else: scan_psd[rat] = Pxx_scan[np.newaxis] pause_psd[rat] = Pxx_pause[np.newaxis] running_psd[rat] = Pxx_running[np.newaxis] rat_list = sorted(scan_psd.keys()) self.out('Averaging spectra for %d rats...' % len(rat_list)) scan_spectra = np.empty((len(rat_list), len(freqs)), 'd') pause_spectra = np.empty_like(scan_spectra) running_spectra = np.empty_like(scan_spectra) for i, rat in enumerate(rat_list): scan_spectra[i] = scan_psd[rat].mean(axis=0) pause_spectra[i] = pause_psd[rat].mean(axis=0) running_spectra[i] = running_psd[rat].mean(axis=0) self.results['rat_list'] = np.array(rat_list) self.results['scan_psd'] = scan_spectra self.results['pause_psd'] = pause_spectra self.results['running_psd'] = running_spectra self.out('All done!')
def collect_data(self): """Create a data structure with theta power/frequency samples with corresponding instantaneous velocity measurements such as path speed, head direction velocity, and radial velocity """ velocity_moments = ('speed', 'radial_velocity', 'hd_velocity') self.results['velocity_moments'] = velocity_moments tetrode_query = '(area=="CA1")&(EEG==True)' dataset_list = TetrodeSelect.datasets(tetrode_query, allow_ambiguous=True) samples = AutoVivification() def initialize_rat_samples(rat): for v_name in velocity_moments: samples[rat][v_name] = np.array([], float) samples[rat]['power'] = np.array([], float) samples[rat]['frequency'] = np.array([], float) def add_velocity_samples(rat, session, t): for moment in velocity_moments: add_data_sample(rat, moment, session.F_(moment)(t)) def add_data_sample(rat, key, data): samples[rat][key] = np.r_[samples[rat][key], data] for rat, day in dataset_list: theta_tt, base_theta = find_theta_tetrode((rat, day), condn=tetrode_query, ambiguous=True) if rat not in samples: initialize_rat_samples(rat) for maze in get_maze_list(rat, day): rds = rat, day, maze self.out('Session rat%03d-%02d-m%d: tetrode Sc%02d' % (rds + (theta_tt, ))) session = SessionData.get(rds, load_clusters=False) EEG = get_eeg_timeseries(rds, theta_tt) if EEG is None: continue ts, x = EEG ts_theta, x_theta = Theta.timeseries(ts, x) P_theta = zscore(Theta.power(x_theta, filtered=True)) f_theta = Theta.frequency(x_theta, filtered=True) ix_scanning = select_from(ts_theta, session.scan_list) t_theta_scanning = session.T_(ts_theta[ix_scanning]) add_velocity_samples(rat, session, t_theta_scanning) add_data_sample(rat, 'power', P_theta[ix_scanning]) add_data_sample(rat, 'frequency', f_theta[ix_scanning]) rat_list = sorted(list(set(samples.keys()))) self.out('Finished collected data for %d rats.' % len(rat_list)) sample_description = {k: tb.FloatCol() for k in velocity_moments} sample_description.update(rat=tb.UInt16Col(), power=tb.FloatCol(), frequency=tb.FloatCol()) data_file = self.open_data_file() results_table = data_file.createTable( '/', 'theta_velocity', sample_description, title='Theta and Velocity Data Across Rats') row = results_table.row self.out('Generating results table...') c = 0 for rat in rat_list: N = samples[rat]['power'].size self.out('Adding rat %d, with %d samples.' % (rat, N)) assert len(set(samples[rat][k].size for k in samples[rat].keys())) == 1 for i in xrange(N): row['rat'] = rat row['power'] = samples[rat]['power'][i] row['frequency'] = samples[rat]['frequency'][i] for moment in velocity_moments: row[moment] = samples[rat][moment][i] row.append() if c % 100 == 0: results_table.flush() if c % 500 == 0: self.out.printf('.') c += 1 self.out.printf('\n') self.out('Done!') self.close_data_file()
def collect_data(self, test='place', place_field='pass', min_quality='fair',
                 allow_ambiguous=True):
    """Tally place fields across areas.

    Keyword arguments are similar to info_scores.InfoScoreData. Remaining
    keywords are passed to TetrodeSelect.
    """
    # Metadata for determining valid fields
    self.results['test'] = test
    self.results['place_field'] = place_field
    self.results['min_quality'] = min_quality
    self.results['allow_ambiguous'] = allow_ambiguous
    if place_field == 'all':
        self.test = 'place'

    # Construct place cell selection criteria based on keyword arguments
    if test == 'place':
        SpatialTest = SpatialInformationCriteria
    elif test == 'skaggs':
        SpatialTest = SkaggsCriteria
    elif test == 'olypher':
        SpatialTest = OlypherCriteria
    else:
        raise ValueError('bad test value: %s' % test)

    MinQuality = get_min_quality_criterion(min_quality)
    CellCriteria = AND(PrincipalCellCriteria, SpikeCountCriteria, MinQuality)
    if place_field == 'pass':
        CellCriteria = AND(CellCriteria, SpatialTest)
    elif place_field == 'fail':
        CellCriteria = AND(CellCriteria, NOT(SpatialTest))
    elif place_field != 'all':
        raise ValueError('bad place_field value: %s' % place_field)

    # Walk the tree and count place fields
    N = {}           # cell-session counts per area/subdivision key
    N_cells = {}     # unique cell counts
    N_sessions = {}  # session counts
    sessions = set()
    tetrodes = get_node('/metadata', 'tetrodes')

    for area in AREAS.keys():
        for subdiv in (['all'] + AREAS[area]):
            self.out('Walking datasets for %s %s...' % (area, subdiv))
            key = '%s_%s' % (area, subdiv)
            N[key] = 0
            N_cells[key] = 0
            N_sessions[key] = 0

            area_query = 'area=="%s"' % area
            if subdiv != 'all':
                area_query = '(%s)&(subdiv=="%s")' % (area_query, subdiv)

            for dataset in TetrodeSelect.datasets(
                    area_query, allow_ambiguous=allow_ambiguous):
                Criteria = AND(CellCriteria,
                               TetrodeSelect.criterion(
                                   dataset, area_query,
                                   allow_ambiguous=allow_ambiguous))

                # Track unique cells across this dataset's sessions
                dataset_cells = set()
                for maze in get_maze_list(*dataset):
                    rds = dataset + (maze,)
                    data = SessionData.get(rds)
                    sessions.add(rds)

                    place_cell_clusters = data.get_clusters(request=Criteria)
                    N[key] += len(place_cell_clusters)
                    dataset_cells.update(place_cell_clusters)
                    N_sessions[key] += 1
                N_cells[key] += len(dataset_cells)

    self.out.timestamp = False
    self.results['N'] = N
    self.out('Total number of sessions = %d' % len(sessions))
    for key in sorted(N.keys()):
        self.out('N_cells[%s] = %d cells' % (key, N_cells[key]))
        self.out('N_sessions[%s] = %d sessions' % (key, N_sessions[key]))
        self.out('N_cell_sessions[%s] = %d cell-sessions' % (key, N[key]))

    # Good-bye
    self.out('All done!')
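
# For reference, the area/subdivision walk above assumes a mapping shaped
# like the following (values are hypothetical; the project's AREAS constant
# defines the actual subdivision names):
#
#   AREAS = {'CA1': ['proximal', 'distal'], 'CA3': ['a', 'b', 'c']}
#
# so each area is tallied both as a whole ('all') and per subdivision,
# keyed as 'CA1_all', 'CA1_proximal', and so on.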
def collect_data(self, area_query='(area=="CA3")|(area=="CA1")'):
    """Collect firing-rate data across scans, pauses, and scan phases."""
    datasets = TetrodeSelect.datasets(area_query)
    tetrode_table = get_node('/metadata', 'tetrodes')
    scan_table = get_node('/behavior', 'scans')

    epochs = ('rate', 'running_rate', 'pause_rate', 'scan_rate',
              'interior_rate', 'exterior_rate', 'outbound_rate',
              'inbound_rate', 'ext_out_rate', 'ext_in_rate',
              'int_out_rate', 'int_in_rate')

    spreadsheet = DataSpreadsheet(
        os.path.join(self.datadir, 'scan_firing_rates.csv'),
        [('dataset', 's'), ('rat', 'd'), ('day', 'd'), ('area', 's'),
         ('area_sub', 's'), ('cell', 's')] + [(n, 'f') for n in epochs])
    self.out('Record string: %s' % spreadsheet.get_record_string())
    record = spreadsheet.get_record()

    # Index labels for the scan phase-point data
    PRE, START, MAX, END = 0, 1, 2, 3

    for dataset in datasets:
        rat, day = dataset
        dataset_str = 'rat%03d-%02d' % dataset
        self.out('Calculating scan firing rates for %s...' % dataset_str)

        # Set dataset info
        record['dataset'] = dataset_str
        record['rat'] = rat
        record['day'] = day

        # Per-cell accumulators: spike counts (N) and occupancy times (T)
        collated_cells = []
        N = {}
        T = {}

        def increment(tc, which, count, duration):
            N[tc][which] += count
            T[tc][which] += duration

        for maze in get_maze_list(rat, day):
            rds = rat, day, maze
            data = SessionData.get(rds)
            traj = data.trajectory

            def occupancy(traj_occupied):
                # Epoch dwell time: session duration scaled by the fraction
                # of tracking samples falling within the epoch
                return data.duration * (np.sum(traj_occupied) /
                                        float(traj.N))

            def scan_phase_points(type_query):
                # (prepause, start, max, end) timestamps for each scan of
                # the given type in this session
                return np.array([
                    (rec['prepause'], rec['start'], rec['max'], rec['end'])
                    for rec in scan_table.where(
                        data.session_query + type_query)])

            Criteria = AND(PlaceCellCriteria,
                           TetrodeSelect.criterion(dataset, area_query))

            for tc in data.get_clusters(Criteria):
                cluster = data.cluster_data(tc)
                if tc not in collated_cells:
                    collated_cells.append(tc)
                    N[tc] = {k: 0 for k in epochs}
                    T[tc] = {k: 0.0 for k in epochs}

                spikes = cluster.spikes
                increment(tc, 'rate', cluster.N, data.duration)
                increment(tc, 'running_rate',
                          data.velocity_filter(spikes).sum(),
                          occupancy(data.velocity_filter(traj.ts)))
                increment(tc, 'scan_rate',
                          np.sum(select_from(spikes, data.scan_list)),
                          occupancy(select_from(traj.ts, data.scan_list)))
                increment(tc, 'pause_rate',
                          np.sum(select_from(spikes, data.pause_list)),
                          occupancy(select_from(traj.ts, data.pause_list)))

                ext_scan_list = scan_phase_points('&(type=="%s")' % EXTERIOR)
                int_scan_list = scan_phase_points('&(type=="%s")' % INTERIOR)
                both_scan_list = scan_phase_points('&(type!="%s")' % AMBIG)

                if ext_scan_list.shape[0]:
                    increment(tc, 'exterior_rate',
                              np.sum(select_from(
                                  spikes, ext_scan_list[:, (START, END)])),
                              occupancy(select_from(
                                  traj.ts, ext_scan_list[:, (START, END)])))
                    increment(tc, 'ext_out_rate',
                              np.sum(select_from(
                                  spikes, ext_scan_list[:, (START, MAX)])),
                              occupancy(select_from(
                                  traj.ts, ext_scan_list[:, (START, MAX)])))
                    increment(tc, 'ext_in_rate',
                              np.sum(select_from(
                                  spikes, ext_scan_list[:, (MAX, END)])),
                              occupancy(select_from(
                                  traj.ts, ext_scan_list[:, (MAX, END)])))

                if int_scan_list.shape[0]:
                    increment(tc, 'interior_rate',
                              np.sum(select_from(
                                  spikes, int_scan_list[:, (START, END)])),
                              occupancy(select_from(
                                  traj.ts, int_scan_list[:, (START, END)])))
                    increment(tc, 'int_out_rate',
                              np.sum(select_from(
                                  spikes, int_scan_list[:, (START, MAX)])),
                              occupancy(select_from(
                                  traj.ts, int_scan_list[:, (START, MAX)])))
                    increment(tc, 'int_in_rate',
                              np.sum(select_from(
                                  spikes, int_scan_list[:, (MAX, END)])),
                              occupancy(select_from(
                                  traj.ts, int_scan_list[:, (MAX, END)])))

                if both_scan_list.shape[0]:
                    increment(tc, 'outbound_rate',
                              np.sum(select_from(
                                  spikes, both_scan_list[:, (START, MAX)])),
                              occupancy(select_from(
                                  traj.ts, both_scan_list[:, (START, MAX)])))
                    increment(tc, 'inbound_rate',
                              np.sum(select_from(
                                  spikes, both_scan_list[:, (MAX, END)])),
                              occupancy(select_from(
                                  traj.ts, both_scan_list[:, (MAX, END)])))

        def firing_rate(tc, k):
            # Pooled rate: total spikes over total occupancy time
            if T[tc][k]:
                return N[tc][k] / T[tc][k]
            return 0.0

        self.out('Writing out spreadsheet records...')
        for tc in collated_cells:
            self.out.printf('.')
            tt, cl = parse_cell_name(tc)
            tetrode = get_unique_row(
                tetrode_table,
                '(rat==%d)&(day==%d)&(tt==%d)' % (rat, day, tt))
            record['area'] = tetrode['area']
            record['area_sub'] = tetrode['subdiv']
            record['cell'] = tc
            record.update({k: firing_rate(tc, k) for k in epochs})
            spreadsheet.write_record(record)
        self.out.printf('\n')

    # Finish up
    spreadsheet.close()
    self.out('All done!')
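
# Note on the rate computation above: spike counts and occupancy times are
# pooled across sessions before dividing, rather than averaging per-session
# rates. A worked example with hypothetical numbers: a cell firing 40 spikes
# during 20 s of scanning in one session and 10 spikes during 30 s in
# another yields (40 + 10) / (20 + 30) = 1.0 Hz, not
# mean(2.0, 0.33) ~ 1.17 Hz, so the estimate stays weighted by the time
# actually spent in each epoch.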
def collect_data(self, band='theta', distro='frequency', drange=None,
                 nbins=128):
    """Collate band frequency/power distributions across head-scan events
    for CA1 datasets.
    """
    self.results['distro'] = distro
    tetrode_query = '(area=="CA1")&(EEG==True)'
    dataset_list = TetrodeSelect.datasets(tetrode_query,
                                          allow_ambiguous=True)

    Band = get_filter(band)
    was_zero_lag = Band.zero_lag
    Band.zero_lag = True
    self.results['band'] = band

    # Rat accumulators
    rat_number = []
    running = {}
    scan = {}
    pause = {}

    for dataset in dataset_list:
        rat, day = dataset
        roi_tt, base_theta = find_theta_tetrode(dataset,
                                                condn=tetrode_query,
                                                ambiguous=True)
        self.out('Rat%03d-%02d: using tetrode Sc%d' % (rat, day, roi_tt))

        if rat not in rat_number:
            rat_number.append(rat)
            running[rat] = np.array([], 'd')
            scan[rat] = np.array([], 'd')
            pause[rat] = np.array([], 'd')

        for session in get_maze_list(rat, day):
            rds = rat, day, session
            data = SessionData.get(rds, load_clusters=False)
            self.out('Collating %s data for rat%03d-%02d-m%d...' %
                     (distro, rat, day, session))

            ts, x = Band.timeseries(*get_eeg_timeseries(rds, roi_tt))
            if distro == 'frequency':
                sig = Band.frequency(x, filtered=True)
            elif distro == 'power':
                sig = zscore(Band.power(x, filtered=True))
            else:
                raise ValueError('distro must be frequency or power')

            running_ix = data.filter_tracking_data(
                ts, boolean_index=True, **data.running_filter())

            running[rat] = np.r_[running[rat], sig[running_ix]]
            scan[rat] = np.r_[scan[rat],
                              sig[select_from(ts, data.scan_list)]]
            pause[rat] = np.r_[pause[rat],
                               sig[select_from(ts, data.pause_list)]]

    # Initialize data storage and accumulators
    running_pdf = []
    running_cdf = []
    running_mu = []
    scan_pdf = []
    scan_cdf = []
    scan_mu = []
    scan_p = []
    pause_pdf = []
    pause_cdf = []
    pause_mu = []
    pause_p = []
    scan_pause_p = []

    # Set up distribution bins
    if drange is not None:
        bins = np.linspace(drange[0], drange[1], nbins + 1)
    elif distro == 'frequency':
        bins = np.linspace(CfgBand[band][0], CfgBand[band][1], nbins + 1)
    elif distro == 'power':
        bins = np.linspace(-2, 3, nbins + 1)
    self.results['centers'] = (bins[1:] + bins[:-1]) / 2

    def sig_distro(data, cdf=False):
        # Smoothed histogram estimate (pdf), optionally cumulative (cdf)
        distro = KT_estimate(np.histogram(data, bins=bins)[0])
        if cdf:
            distro = np.cumsum(distro) / np.sum(distro)
        return distro

    for rat in rat_number:
        self.out('Computing distributions and stats for rat %d...' % rat)
        running_pdf.append(sig_distro(running[rat]))
        scan_pdf.append(sig_distro(scan[rat]))
        pause_pdf.append(sig_distro(pause[rat]))
        running_cdf.append(sig_distro(running[rat], cdf=True))
        scan_cdf.append(sig_distro(scan[rat], cdf=True))
        pause_cdf.append(sig_distro(pause[rat], cdf=True))

        # Means and two-sample KS tests between behavioral states
        running_mu.append(running[rat].mean())
        scan_mu.append(scan[rat].mean())
        D, pval = st.ks_2samp(scan[rat], running[rat])
        scan_p.append(pval)
        pause_mu.append(pause[rat].mean())
        D, pval = st.ks_2samp(pause[rat], running[rat])
        pause_p.append(pval)
        D, pval = st.ks_2samp(scan[rat], pause[rat])
        scan_pause_p.append(pval)

    # Store results data
    self.results['rat_number'] = np.array(rat_number)
    self.results['running_pdf'] = np.array(running_pdf)
    self.results['scan_pdf'] = np.array(scan_pdf)
    self.results['pause_pdf'] = np.array(pause_pdf)
    self.results['running_cdf'] = np.array(running_cdf)
    self.results['scan_cdf'] = np.array(scan_cdf)
    self.results['pause_cdf'] = np.array(pause_cdf)
    self.results['running_mu'] = np.array(running_mu)
    self.results['scan_mu'] = np.array(scan_mu)
    self.results['scan_p'] = np.array(scan_p)
    self.results['pause_mu'] = np.array(pause_mu)
    self.results['pause_p'] = np.array(pause_p)
    self.results['scan_pause_p'] = np.array(scan_pause_p)

    # Good-bye!
    Band.zero_lag = was_zero_lag
    self.out('All done!')
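
# A minimal sketch of the KT_estimate smoothing used in sig_distro above,
# assuming it is the standard Krichevsky-Trofimov (add-one-half) estimator,
# which regularizes empty histogram bins before normalizing. The project's
# helper may differ in normalization details.
def _kt_estimate_sketch(counts):
    import numpy as np
    counts = np.asarray(counts, dtype=float)
    return (counts + 0.5) / (counts.sum() + 0.5 * counts.size)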
def collect_data(self, area='CA1', phase_band='theta', amp_band='gamma',
                 tetrode='theta', cycles=2, nbins=72):
    """Collate phase-amplitude modulation data about head-scan events."""
    tetrode_query = '(area=="%s")&(EEG==True)' % area
    dataset_list = TetrodeSelect.datasets(tetrode_query,
                                          allow_ambiguous=True)

    self.results['phase_band'] = phase_band
    self.results['amp_band'] = amp_band

    # Dataset accumulators
    rat_number = self.results['rat_number'] = []
    P_running = {}
    P_scan = {}
    P_pause = {}

    for dataset in dataset_list:
        rat, day = dataset

        # Find the tetrode with the highest overall relative theta power,
        # or the pyramidal-layer tetrode, per the chosen strategy
        if tetrode == 'theta':
            roi_tt, _rtheta = find_theta_tetrode(dataset,
                                                 condn=tetrode_query,
                                                 ambiguous=True)
        else:
            roi_tt = find_pyramidale_tetrode(dataset, condn=tetrode_query,
                                             ambiguous=True)
        self.out('Rat%03d-%02d: using tetrode Sc%d' % (rat, day, roi_tt))

        # Session accumulators
        phase_t = np.array([], 'i8')
        phase = np.array([], 'd')
        running_amp_t = np.array([], 'i8')
        running_amp = np.array([], 'd')
        scan_amp_t = np.array([], 'i8')
        scan_amp = np.array([], 'd')
        pause_amp_t = np.array([], 'i8')
        pause_amp = np.array([], 'd')

        self.out('Collating phase-amplitude data for rat%03d-%02d...' %
                 dataset)
        for session in get_maze_list(rat, day):
            rds = rat, day, session
            data = SessionData.get(rds)

            phase_data, amp_data = phase_modulation_timeseries(
                *get_eeg_timeseries(rds, roi_tt),
                phase=phase_band, amp=amp_band)
            t_phase, phi_x = phase_data
            t_amp, A_x = amp_data

            phase_t = np.r_[phase_t, t_phase]
            phase = np.r_[phase, phi_x]

            ix = data.velocity_filter(t_amp)
            running_amp_t = np.r_[running_amp_t, t_amp[ix]]
            running_amp = np.r_[running_amp, A_x[ix]]

            ix = select_from(t_amp, data.scan_list)
            scan_amp_t = np.r_[scan_amp_t, t_amp[ix]]
            scan_amp = np.r_[scan_amp, A_x[ix]]

            ix = select_from(t_amp, data.pause_list)
            pause_amp_t = np.r_[pause_amp_t, t_amp[ix]]
            pause_amp = np.r_[pause_amp, A_x[ix]]

        # Initialize per-rat phase distributions
        if rat not in rat_number:
            rat_number.append(rat)
            P_running[rat] = []
            P_scan[rat] = []
            P_pause[rat] = []

        self.out('...computing phase distributions...')
        phase_series = (phase_t, phase)
        P_running[rat].append(
            PAD(phase_series, (running_amp_t, running_amp), nbins=nbins))
        P_scan[rat].append(
            PAD(phase_series, (scan_amp_t, scan_amp), nbins=nbins))
        P_pause[rat].append(
            PAD(phase_series, (pause_amp_t, pause_amp), nbins=nbins))

    self.out('Averaging dataset distributions to rat distributions...')
    norm = lambda P: P / P.sum()
    for rat in rat_number:
        P_running[rat] = norm(np.array(P_running[rat]).mean(axis=0))
        P_scan[rat] = norm(np.array(P_scan[rat]).mean(axis=0))
        P_pause[rat] = norm(np.array(P_pause[rat]).mean(axis=0))

    # Initialize data storage and accumulators
    running_distro = []
    running_index = []
    scan_distro = []
    scan_index = []
    pause_distro = []
    pause_index = []

    self.out('Computing display distributions and modulation indexes...')
    plottable = lambda P: plottable_phase_distribution(P, cycles=cycles)
    for rat in rat_number:
        phase_bins, P = plottable(P_running[rat])
        if 'phase_bins' not in self.results:
            self.results['phase_bins'] = phase_bins
        running_distro.append(P)
        running_index.append(modulation_index(P_running[rat]))
        scan_distro.append(plottable(P_scan[rat])[1])
        scan_index.append(modulation_index(P_scan[rat]))
        pause_distro.append(plottable(P_pause[rat])[1])
        pause_index.append(modulation_index(P_pause[rat]))

    # Store results data
    self.results['running_distro'] = np.array(running_distro)
    self.results['running_index'] = np.array(running_index)
    self.results['scan_distro'] = np.array(scan_distro)
    self.results['scan_index'] = np.array(scan_index)
    self.results['pause_distro'] = np.array(pause_distro)
    self.results['pause_index'] = np.array(pause_index)

    # Good-bye!
    self.out('All done!')
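
# A minimal sketch of the modulation_index computation, assuming the
# Tort et al. (2010) definition: the KL divergence of the phase-amplitude
# distribution P from the uniform distribution, normalized by log(N). This
# is an assumption about the project's helper, shown for illustration only.
def _modulation_index_sketch(P):
    import numpy as np
    P = np.asarray(P, dtype=float)
    P = P / P.sum()
    N = P.size
    H = -np.sum(P[P > 0] * np.log(P[P > 0]))  # Shannon entropy of P
    return (np.log(N) - H) / np.log(N)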