def get_evoked_responses():
    '''computes the maximum evoked drifting-gratings response for every cell in every experiment container'''
    save_path = core.get_save_path()
    manifest_path = core.get_manifest_path()
    boc = BrainObservatoryCache(manifest_file=manifest_path)
    exps = pd.DataFrame(boc.get_ophys_experiments(include_failed=False))
    for i, a in enumerate(exps.experiment_container_id.unique()):
        if np.mod(i, 50) == 0:
            print i
        subset = exps[exps.experiment_container_id == a]
        session_A = subset[subset.session_type == 'three_session_A'].id.values[0]
        dataset = boc.get_ophys_experiment_data(session_A)
        cell_ids_A = dataset.get_cell_specimen_ids()
        data_file_dg = os.path.join(save_path, 'DriftingGratings',
                                    str(session_A) + "_dg_events_analysis.h5")
        f = h5py.File(data_file_dg, 'r')
        response_dg = f['response_events'].value
        f.close()
        numbercells = response_dg.shape[2]
        evoked_response = pd.DataFrame(
            columns=('cell_specimen_id', 'max_evoked_response_dg'),
            index=range(numbercells))
        evoked_response['cell_specimen_id'] = cell_ids_A
        # flatten the (direction, TF) condition grid into 48 rows, then take
        # each cell's maximum mean response (index 0 of the last axis)
        response2 = response_dg.reshape(48, numbercells, 3)
        evoked_response.max_evoked_response_dg = np.nanmax(response2[:, :, 0], axis=0)
        if i == 0:
            evoked_response_all = evoked_response.copy()
        else:
            evoked_response_all = evoked_response_all.append(evoked_response)
    return evoked_response_all

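# A minimal numpy sketch (dummy shapes, not part of the pipeline) of why the
# reshape above is safe. It assumes the saved 'response_events' array is
# (8 directions, 6 temporal-frequency slots, n_cells, 3 summary stats), with
# index 0 of the last axis holding the mean response; flattening the first
# two axes in row-major order yields the 48 condition rows used above.
def _demo_dg_reshape():
    n_cells = 5
    demo = np.random.rand(8, 6, n_cells, 3)   # direction x TF x cell x stat
    flat = demo.reshape(48, n_cells, 3)       # condition x cell x stat
    # condition (3, 2) lands at flat row 3*6 + 2 under row-major flattening
    assert np.allclose(demo[3, 2, :, 0], flat[3 * 6 + 2, :, 0])
    return np.nanmax(flat[:, :, 0], axis=0)   # per-cell max evoked response
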
def __init__(self, session_id):
    self.session_id = session_id
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'LocallySparseNoise')
    self.l0_events = core.get_L0_events(self.session_id)
    self.stim_table_sp, _, _ = core.get_stim_table(self.session_id, 'spontaneous')
    self.dxcm = core.get_running_speed(self.session_id)
    try:
        # three_session_C: a single locally sparse noise stimulus
        lsn_name = 'locally_sparse_noise'
        self.stim_table, self.numbercells, self.specimen_ids = core.get_stim_table(
            self.session_id, lsn_name)
        self.LSN = core.get_stimulus_template(self.session_id, lsn_name)
        (self.sweep_events, self.mean_sweep_events, self.sweep_p_values,
         self.running_speed, self.response_events_on,
         self.response_events_off) = self.get_stimulus_response(self.LSN)
    except Exception:
        # three_session_C2: separate 4-degree and 8-degree variants
        lsn_name = 'locally_sparse_noise_4deg'
        self.stim_table, self.numbercells, self.specimen_ids = core.get_stim_table(
            self.session_id, lsn_name)
        self.LSN_4deg = core.get_stimulus_template(self.session_id, lsn_name)
        (self.sweep_events_4deg, self.mean_sweep_events_4deg,
         self.sweep_p_values_4deg, self.running_speed_4deg,
         self.response_events_on_4deg,
         self.response_events_off_4deg) = self.get_stimulus_response(self.LSN_4deg)
        lsn_name = 'locally_sparse_noise_8deg'
        self.stim_table, _, _ = core.get_stim_table(self.session_id, lsn_name)
        self.LSN_8deg = core.get_stimulus_template(self.session_id, lsn_name)
        (self.sweep_events_8deg, self.mean_sweep_events_8deg,
         self.sweep_p_values_8deg, self.running_speed_8deg,
         self.response_events_on_8deg,
         self.response_events_off_8deg) = self.get_stimulus_response(self.LSN_8deg)
    self.peak = self.get_peak(lsn_name)
    self.save_data(lsn_name)

def __init__(self, *args, **kwargs):
    # all attributes, including the required session_id, arrive as keywords
    for k, v in kwargs.iteritems():
        setattr(self, k, v)
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'NaturalMovies')
    self.l0_events = core.get_L0_events(self.session_id)
    self.stim_table_1b, self.numbercells, self.specimen_ids = core.get_stim_table(
        self.session_id, 'natural_movie_one')

def get_all_dg():
    save_path = core.get_save_path()
    manifest_path = core.get_manifest_path()
    boc = BrainObservatoryCache(manifest_file=manifest_path)
    exps = pd.DataFrame(boc.get_ophys_experiments(include_failed=False))
    missing = []
    for i, a in enumerate(exps.experiment_container_id.unique()):
        subset = exps[exps.experiment_container_id == a]
        try:
            session_A = subset[subset.session_type == 'three_session_A'].id.values[0]
            data_file_dg = os.path.join(save_path, 'DriftingGratings',
                                        str(session_A) + "_dg_events_analysis.h5")
            peak_dg = pd.read_hdf(data_file_dg, 'peak')
            peak_dg['experiment_container_id'] = a
            peak_dg['tld1_name'] = subset.cre_line.iloc[0]
            peak_dg['area'] = subset.targeted_structure.iloc[0]
            peak_dg['imaging_depth'] = subset.imaging_depth.iloc[0]
            # depth binning: Scnn1a/Nr5a1 are pinned to the 200 um bin, Fezf2
            # to 300 um, everything else falls into 100 um-wide bins
            if subset.cre_line.iloc[0] in ['Scnn1a-Tg3-Cre', 'Nr5a1-Cre']:
                depth_range = 200
            elif subset.cre_line.iloc[0] in ['Fezf2-CreER']:
                depth_range = 300
            else:
                depth_range = 100 * (
                    (np.floor(subset.imaging_depth.iloc[0] / 100)).astype(int))
            peak_dg['depth_range'] = depth_range
            peak_dg['cre_depth'] = peak_dg[['tld1_name',
                                            'depth_range']].apply(tuple, axis=1)
            if i == 0:
                peak_all = peak_dg.copy()
            else:
                peak_all = peak_all.append(peak_dg)
        except Exception:
            missing.append(a)
    peak_all.reset_index(inplace=True)
    peak_all['type'] = 'E'
    peak_all.loc[peak_all.tld1_name == 'Sst-IRES-Cre', 'type'] = 'I'
    peak_all.loc[peak_all.tld1_name == 'Vip-IRES-Cre', 'type'] = 'I'
    peak_all.to_hdf(os.path.join(save_path, 'Metrics', 'metrics_dg.h5'), 'peak_all')
    return peak_all, missing

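# Hedged helper (not in the original module) restating the depth-binning rule
# above so it can be checked in isolation; the Cre line names in the demo
# asserts are only examples.
def _depth_range(cre_line, imaging_depth):
    if cre_line in ['Scnn1a-Tg3-Cre', 'Nr5a1-Cre']:
        return 200
    elif cre_line in ['Fezf2-CreER']:
        return 300
    return 100 * int(np.floor(imaging_depth / 100))

def _demo_depth_range():
    assert _depth_range('Cux2-CreERT2', 175) == 100   # generic line: 100 um bins
    assert _depth_range('Scnn1a-Tg3-Cre', 350) == 200 # pinned regardless of depth
    assert _depth_range('Fezf2-CreER', 500) == 300
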
def merge_all_metrics():
    save_path = core.get_save_path()
    manifest_path = core.get_manifest_path()
    boc = BrainObservatoryCache(manifest_file=manifest_path)
    exps = pd.DataFrame(boc.get_ophys_experiments(include_failed=False))
    for i, a in enumerate(exps.experiment_container_id.unique()):
        subset = exps[exps.experiment_container_id == a]
        session_A = subset[subset.session_type == 'three_session_A'].id.values[0]
        session_B = subset[subset.session_type == 'three_session_B'].id.values[0]
        # directory names must match the save paths used by the per-stimulus
        # analyses (no spaces)
        data_file_dg = os.path.join(save_path, 'DriftingGratings',
                                    str(session_A) + "_dg_events_analysis.h5")
        peak_dg = pd.read_hdf(data_file_dg, 'peak')
        data_file_nm = os.path.join(save_path, 'NaturalMoviesA',
                                    str(session_A) + "_nm_events_analysis.h5")
        peak_nm = pd.read_hdf(data_file_nm, 'peak')
        data_file_sg = os.path.join(save_path, 'StaticGratings',
                                    str(session_B) + "_sg_events_analysis.h5")
        peak_sg = pd.read_hdf(data_file_sg, 'peak')
        data_file_ns = os.path.join(save_path, 'NaturalScenes',
                                    str(session_B) + "_ns_events_analysis.h5")
        peak_ns = pd.read_hdf(data_file_ns, 'peak')
        peak_all = pd.merge(peak_dg, peak_nm, on='cell_specimen_id', how='outer')
        peak_all = pd.merge(peak_all, peak_sg, on='cell_specimen_id', how='outer')
        peak_all = pd.merge(peak_all, peak_ns, on='cell_specimen_id', how='outer')
        # peak_all = pd.merge(peak_all, peak_lsn, on='cell_specimen_id', how='outer')
        peak_all['experiment_container_id'] = a
        peak_all['tld1_name'] = subset.cre_line.iloc[0]
        peak_all['area'] = subset.targeted_structure.iloc[0]
        peak_all['imaging_depth'] = subset.imaging_depth.iloc[0]
        peak_all.to_csv(os.path.join(save_path, 'Metrics',
                                     str(a) + '_all_metrics.csv'))
        if i == 0:
            metrics = peak_all.copy()
        else:
            metrics = metrics.append(peak_all)
    metrics.to_csv(os.path.join(save_path, 'Metrics', 'metrics.csv'))
    return metrics

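# Minimal pandas sketch (dummy frames, illustrative column names) of the
# outer merges above: a cell present in only one session keeps NaN for the
# other session's metrics instead of being dropped.
def _demo_outer_merge():
    dg = pd.DataFrame({'cell_specimen_id': [1, 2], 'metric_dg': [0.8, 0.2]})
    ns = pd.DataFrame({'cell_specimen_id': [2, 3], 'metric_ns': [0.5, 0.9]})
    return pd.merge(dg, ns, on='cell_specimen_id', how='outer')  # 3 rows, NaNs
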
def __init__(self, session_id):
    self.session_id = session_id
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'NaturalMoviesB')
    self.l0_events = core.get_L0_events(self.session_id)
    self.stim_table_1b, self.numbercells, self.specimen_ids = core.get_stim_table(
        self.session_id, 'natural_movie_one')
    self.response_events_1b, self.response_trials_1b = self.get_stimulus_response_one()
    self.peak = self.get_peak()
    self.save_data()

def __init__(self, session_id):
    self.session_id = session_id
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'NaturalScenes')
    self.l0_events = core.get_L0_events(self.session_id)
    self.stim_table, self.numbercells, self.specimen_ids = core.get_stim_table(
        self.session_id, 'natural_scenes')
    self.stim_table_sp, _, _ = core.get_stim_table(self.session_id, 'spontaneous')
    self.dxcm = core.get_running_speed(self.session_id)
    (self.sweep_events, self.mean_sweep_events, self.sweep_p_values,
     self.running_speed, self.response_events,
     self.response_trials) = self.get_stimulus_response()
    self.peak = self.get_peak()
    self.save_data()

def __init__(self, session_id):
    self.session_id = session_id
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'DriftingGratings')
    self.l0_events = core.get_L0_events(self.session_id)
    self.stim_table, self.numbercells, self.specimen_ids = core.get_stim_table(
        self.session_id, 'drifting_gratings')
    self.dxcm = core.get_running_speed(self.session_id)
    self.stim_table_sp, _, _ = core.get_stim_table(self.session_id, 'spontaneous')
    self.orivals = range(0, 360, 45)   # 8 directions (deg)
    self.tfvals = [0, 1, 2, 4, 8, 15]  # 6 temporal-frequency slots (Hz)
    (self.sweep_events, self.mean_sweep_events, self.sweep_p_values,
     self.running_speed, self.response_events,
     self.response_trials) = self.get_stimulus_response()
    self.peak = self.get_peak()
    self.save_data()

def __init__(self, session_id):
    self.session_id = session_id
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'StaticGratings')
    self.l0_events = core.get_L0_events(self.session_id)
    self.stim_table, self.numbercells, self.specimen_ids = core.get_stim_table(
        self.session_id, 'static_gratings')
    self.stim_table_sp, _, _ = core.get_stim_table(self.session_id, 'spontaneous')
    self.dxcm = core.get_running_speed(self.session_id)
    self.orivals = range(0, 180, 30)                # 6 orientations (deg)
    self.sfvals = [0, 0.02, 0.04, 0.08, 0.16, 0.32] # spatial-frequency slots (cpd)
    self.phasevals = [0, 0.25, 0.5, 0.75]           # 4 phases
    (self.sweep_events, self.mean_sweep_events, self.sweep_p_values,
     self.running_speed, self.response_events,
     self.response_trials) = self.get_stimulus_response()
    self.peak = self.get_peak()
    self.save_data()

def __init__(self, session_id):
    self.session_id = session_id
    save_path_head = core.get_save_path()
    self.save_path = os.path.join(save_path_head, 'LocallySparseNoise')
    self.l0_events = core.get_L0_events(self.session_id)
    #TODO: enable lsn, lsn4, lsn8
    lsn_name = 'locally_sparse_noise'
    self.stim_table, self.numbercells, self.specimen_ids = core.get_stim_table(
        self.session_id, lsn_name)
    self.LSN = core.get_stimulus_template(self.session_id, lsn_name)
    self.stim_table_sp, _, _ = core.get_stim_table(self.session_id, 'spontaneous')
    self.dxcm = core.get_running_speed(self.session_id)
    (self.sweep_events, self.mean_sweep_events, self.sweep_p_values,
     self.running_speed, self.response_events,
     self.response_trials) = self.get_stimulus_response()
    self.peak = self.get_peak()
    self.save_data()

#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 5 15:57:59 2018

@author: saskiad
"""
import numpy as np
import pandas as pd
import os
import h5py
from allensdk.core.brain_observatory_cache import BrainObservatoryCache
import core

manifest_path = core.get_manifest_path()
save_path_head = core.get_save_path()
save_path = os.path.join(save_path_head, 'StaticGratings')
boc = BrainObservatoryCache(manifest_file=manifest_path)
exp = pd.DataFrame(
    boc.get_ophys_experiments(session_types=['three_session_B']))

# 120 static-gratings conditions (presumably 6 orientations x 5 spatial
# frequencies x 4 phases) by 424 experiment containers
pop_sparse = np.empty((120, 424))
pop_sparse[:] = np.NaN
pop_sparseness = pd.DataFrame(
    columns=('experiment_container_id', 'id', 'cre', 'area', 'depth_range',
             'cre_depth', 'population_sparseness_sg', 'number_cells_sg'),
    index=range(424))

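# A hedged sketch of the metric this script is building toward, using the
# standard Vinje & Gallant (2000) definition of population sparseness; the
# actual computation used to fill pop_sparse may differ in detail.
# `responses` is a 1-D array of each cell's mean response to one condition:
#   S = (1 - (sum(r)/n)^2 / (sum(r^2)/n)) / (1 - 1/n)
def population_sparseness(responses):
    r = np.asarray(responses, dtype=float)
    n = float(r.size)
    return (1.0 - (r.sum() / n)**2 / ((r**2).sum() / n)) / (1.0 - 1.0 / n)
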
def get_movie_correlations():
    save_path = core.get_save_path()
    manifest_path = core.get_manifest_path()
    boc = BrainObservatoryCache(manifest_file=manifest_path)
    exps = pd.DataFrame(boc.get_ophys_experiments(include_failed=False))
    for i, a in enumerate(exps.experiment_container_id.unique()):
        if np.mod(i, 50) == 0:
            print i
        subset = exps[exps.experiment_container_id == a]
        session_A = subset[subset.session_type == 'three_session_A'].id.values[0]
        session_B = subset[subset.session_type == 'three_session_B'].id.values[0]
        try:
            session_C = subset[subset.session_type == 'three_session_C'].id.values[0]
        except IndexError:
            session_C = subset[subset.session_type == 'three_session_C2'].id.values[0]
        a_path = os.path.join(save_path, 'NaturalMoviesA',
                              str(session_A) + '_nm_events_analysis.h5')
        b_path = os.path.join(save_path, 'NaturalMoviesB',
                              str(session_B) + '_nm_events_analysis.h5')
        c_path = os.path.join(save_path, 'NaturalMoviesC',
                              str(session_C) + '_nm_events_analysis.h5')
        dataset_A = boc.get_ophys_experiment_data(session_A)
        specimen_ids_A = dataset_A.get_cell_specimen_ids()
        dataset_B = boc.get_ophys_experiment_data(session_B)
        specimen_ids_B = dataset_B.get_cell_specimen_ids()
        dataset_C = boc.get_ophys_experiment_data(session_C)
        specimen_ids_C = dataset_C.get_cell_specimen_ids()
        f = h5py.File(a_path, 'r')
        response_A = f['response_events_1a'].value
        f.close()
        f = h5py.File(b_path, 'r')
        response_B = f['response_events_1b'].value
        f.close()
        f = h5py.File(c_path, 'r')
        response_C = f['response_events_1c'].value
        f.close()
        peak_A = pd.read_hdf(a_path, 'peak')
        peak_A['peak_frame_nm1a'] = peak_A.peak_frame_nm1a.astype(int)
        peak_B = pd.read_hdf(b_path, 'peak')
        peak_B['peak_frame_nm1b'] = peak_B.peak_frame_nm1b.astype(int)
        peak_C = pd.read_hdf(c_path, 'peak')
        peak_C['peak_frame_nm1c'] = peak_C.peak_frame_nm1c.astype(int)
        peak = pd.merge(peak_A, peak_B, on='cell_specimen_id', how='outer')
        peak = pd.merge(peak, peak_C, on='cell_specimen_id', how='outer')
        peak['correlation_ab'] = np.NaN
        peak['correlation_bc'] = np.NaN
        peak['correlation_ac'] = np.NaN
        # correlate each cell's mean movie response across session pairs,
        # restricted to cells matched (finite peak frame) in both sessions;
        # st is scipy.stats
        peak_subset = peak[np.isfinite(peak.peak_frame_nm1a)
                           & np.isfinite(peak.peak_frame_nm1b)]
        for index, row in peak_subset.iterrows():
            nc = row.cell_specimen_id
            resp_A = response_A[:, np.where(specimen_ids_A == nc)[0][0], 0]
            resp_B = response_B[:, np.where(specimen_ids_B == nc)[0][0], 0]
            r, p = st.pearsonr(resp_A, resp_B)
            peak.loc[index, 'correlation_ab'] = r
        peak_subset = peak[np.isfinite(peak.peak_frame_nm1a)
                           & np.isfinite(peak.peak_frame_nm1c)]
        for index, row in peak_subset.iterrows():
            nc = row.cell_specimen_id
            resp_A = response_A[:, np.where(specimen_ids_A == nc)[0][0], 0]
            resp_C = response_C[:, np.where(specimen_ids_C == nc)[0][0], 0]
            r, p = st.pearsonr(resp_A, resp_C)
            peak.loc[index, 'correlation_ac'] = r
        peak_subset = peak[np.isfinite(peak.peak_frame_nm1c)
                           & np.isfinite(peak.peak_frame_nm1b)]
        for index, row in peak_subset.iterrows():
            nc = row.cell_specimen_id
            resp_C = response_C[:, np.where(specimen_ids_C == nc)[0][0], 0]
            resp_B = response_B[:, np.where(specimen_ids_B == nc)[0][0], 0]
            r, p = st.pearsonr(resp_C, resp_B)
            peak.loc[index, 'correlation_bc'] = r
        peak['experiment_container_id'] = a
        peak['tld1_name'] = subset.cre_line.iloc[0]
        peak['area'] = subset.targeted_structure.iloc[0]
        peak['imaging_depth'] = subset.imaging_depth.iloc[0]
        if subset.cre_line.iloc[0] in ['Scnn1a-Tg3-Cre', 'Nr5a1-Cre']:
            depth_range = 200
        elif subset.cre_line.iloc[0] in ['Fezf2-CreER']:
            depth_range = 300
        else:
            depth_range = 100 * (
                (np.floor(subset.imaging_depth.iloc[0] / 100)).astype(int))
        peak['depth_range'] = depth_range
        peak['cre_depth'] = peak[['tld1_name', 'depth_range']].apply(tuple, axis=1)
        if i == 0:
            peak_all = peak.copy()
        else:
            peak_all = peak_all.append(peak, ignore_index=True)
    peak_all.reset_index(inplace=True)
    peak_all['type'] = 'E'
    peak_all.loc[peak_all.tld1_name == 'Sst-IRES-Cre', 'type'] = 'I'
    peak_all.loc[peak_all.tld1_name == 'Vip-IRES-Cre', 'type'] = 'I'
    peak_all.to_hdf(os.path.join(save_path, 'Metrics', 'metrics_nm.h5'), 'peak_all')
    return peak_all

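# Minimal sketch (dummy data) of the cell-matching logic above: cells are
# aligned across sessions by cell_specimen_id, then their frame-by-frame mean
# movie responses are compared with a Pearson r.
def _demo_cross_session_correlation():
    import scipy.stats as st
    ids_a = np.array([101, 102, 103])
    ids_b = np.array([103, 101])             # session B tracked fewer cells
    resp_a = np.random.rand(900, 3)          # movie frames x cells, session A
    resp_b = np.random.rand(900, 2)          # movie frames x cells, session B
    nc = 101                                 # a cell matched in both sessions
    r, p = st.pearsonr(resp_a[:, np.where(ids_a == nc)[0][0]],
                       resp_b[:, np.where(ids_b == nc)[0][0]])
    return r
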
""" # dataset # get spontanoues stim table # get L0 # spontaneous rate # table cell id, cre, area, depth, cre_depth, spontaneous rate import numpy as np import pandas as pd #import os, h5py #import scipy.stats as st import core from allensdk.core.brain_observatory_cache import BrainObservatoryCache save_path = core.get_save_path() manifest_path = core.get_manifest_path() boc = BrainObservatoryCache(manifest_file=manifest_path) exps = pd.DataFrame(boc.get_ophys_experiments(include_failed=False)) for i,a in enumerate(exps.experiment_container_id.unique()): if np.mod(i,50)==0: print i subset = exps[exps.experiment_container_id==a] session_A = subset[subset.session_type=='three_session_A'].id.values[0] session_B = subset[subset.session_type=='three_session_B'].id.values[0] try: session_C = subset[subset.session_type=='three_session_C'].id.values[0] except: session_C = subset[subset.session_type=='three_session_C2'].id.values[0] dataset_A = boc.get_ophys_experiment_data(session_A) specimen_ids_A = dataset_A.get_cell_specimen_ids()
def Speed_Tuning(session_id, binsize=900):
    save_path_head = core.get_save_path()
    save_path = os.path.join(save_path_head, 'SpeedTuning')
    l0_events = core.get_L0_events(session_id)
    dxcm = core.get_running_speed(session_id)
    _, numbercells, specimen_ids = core.get_stim_table(session_id, 'spontaneous')

    #remove any NaNs from running and activity traces
    dx_trim = dxcm[~np.isnan(dxcm)]
    l0_trim = l0_events[:, ~np.isnan(dxcm)]

    #one stationary bin (speed <= 1 cm/s) plus fixed-size bins of running frames
    numbins = 1 + len(dx_trim[dx_trim > 1]) / binsize

    #rank sort traces by running speed
    dx_sorted = dx_trim[np.argsort(dx_trim)]
    events_sorted = l0_trim[:, np.argsort(dx_trim)]

    #bin running and activity
    binned_cells = np.zeros((numbercells, numbins, 2))
    binned_dx = np.zeros((numbins, 2))
    offset = np.where(dx_sorted > 1)[0][0]  # first running frame
    for i in range(numbins):
        if i == 0:
            binned_dx[i, 0] = np.mean(dx_sorted[:offset])
            binned_dx[i, 1] = np.std(dx_sorted[:offset]) / np.sqrt(offset)
            binned_cells[:, i, 0] = np.mean(events_sorted[:, :offset], axis=1)
            binned_cells[:, i, 1] = np.std(events_sorted[:, :offset],
                                           axis=1) / np.sqrt(offset)
        else:
            start = offset + (i - 1) * binsize
            binned_dx[i, 0] = np.mean(dx_sorted[start:start + binsize])
            binned_dx[i, 1] = np.std(
                dx_sorted[start:start + binsize]) / np.sqrt(binsize)
            binned_cells[:, i, 0] = np.mean(
                events_sorted[:, start:start + binsize], axis=1)
            binned_cells[:, i, 1] = np.std(
                events_sorted[:, start:start + binsize],
                axis=1) / np.sqrt(binsize)

    #shuffle activity relative to running to get a significance threshold
    binned_cells_shuffled = np.empty((numbercells, numbins, 2, 200))
    for shuf in range(200):
        events_shuffled = l0_trim[:, np.random.permutation(np.size(l0_trim, 1))]
        events_shuffled_sorted = events_shuffled[:, np.argsort(dx_trim)]
        for i in range(numbins):
            if i == 0:
                binned_cells_shuffled[:, i, 0, shuf] = np.mean(
                    events_shuffled_sorted[:, :offset], axis=1)
                binned_cells_shuffled[:, i, 1, shuf] = np.std(
                    events_shuffled_sorted[:, :offset],
                    axis=1) / np.sqrt(offset)
            else:
                start = offset + (i - 1) * binsize
                binned_cells_shuffled[:, i, 0, shuf] = np.mean(
                    events_shuffled_sorted[:, start:start + binsize], axis=1)
                binned_cells_shuffled[:, i, 1, shuf] = np.std(
                    events_shuffled_sorted[:, start:start + binsize],
                    axis=1) / np.sqrt(binsize)

    #a cell is run modulated if the variance of its binned mean responses
    #exceeds the 99.9th percentile of the shuffled variances
    shuffled_variance = binned_cells_shuffled[:, :, 0, :].std(axis=1)**2
    variance_threshold = np.percentile(shuffled_variance, 99.9, axis=1)
    response_variance = binned_cells[:, :, 0].std(axis=1)**2
    peak = pd.DataFrame(columns=('cell_specimen_id', 'run_mod'),
                        index=range(numbercells))
    peak.cell_specimen_id = specimen_ids
    peak.run_mod = response_variance > variance_threshold

    #save data
    save_file = os.path.join(save_path,
                             str(session_id) + '_speed_tuning_events.h5')
    store = pd.HDFStore(save_file)
    store['peak'] = peak
    store.close()
    f = h5py.File(save_file, 'r+')
    dset = f.create_dataset('binned_dx', data=binned_dx)
    dset1 = f.create_dataset('binned_cells', data=binned_cells)
    dset2 = f.create_dataset('binned_cells_shuffled',
                             data=binned_cells_shuffled)
    f.close()

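# Standalone demo (dummy data, not called by the pipeline) of the binning
# scheme above: frames are rank-sorted by running speed, everything at
# <= 1 cm/s lands in bin 0, and the remaining frames fill fixed-width bins
# of `binsize` samples each (any partial final bin is dropped).
def _demo_speed_bins(binsize=900):
    speed = np.abs(np.random.randn(5000)) * 5      # dummy running speed (cm/s)
    speed_sorted = np.sort(speed)
    offset = np.where(speed_sorted > 1)[0][0]      # first frame above 1 cm/s
    nbins = 1 + (len(speed_sorted) - offset) / binsize
    edges = [0, offset] + [offset + j * binsize for j in range(1, nbins)]
    return [speed_sorted[lo:hi].mean()             # mean speed per bin
            for lo, hi in zip(edges[:-1], edges[1:])]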