def batch_calc_MSE():
    p_load = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO')
    p_save = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO\results')
    p_smooth = r'E:\VG3D\_rerun_with_pad\_deflection_trials\_NEO\smooth'
    DF = pd.DataFrame()
    for ii, f in enumerate(glob.glob(os.path.join(p_load, '*.h5'))):
        if ii == 0:  # skip the first file
            continue
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            try:
                mse = calc_MSE(f, p_smooth, unit_num)
                df = pd.DataFrame()
                # jj avoids shadowing the file index ii above
                for jj, var in enumerate(
                        ['Mx', 'My', 'Mz', 'Fx', 'Fy', 'Fz', 'TH', 'PHI']):
                    df[var] = mse[jj]
                df['id'] = id
                df['smoothing'] = np.arange(5, 100, 10)
                DF = DF.append(df)
            except Exception:
                print('Problem on {}'.format(id))
    DF.to_csv(os.path.join(p_save, 'MSE_by_smoothing.csv'), index=False)
def batch_pc(p_load, p_save):
    ID = []
    COV = []
    EXP_VAR = []
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        id = neoUtils.get_root(blk, 0)[:-2]
        print('Working on {}'.format(id))
        pc = get_pc(blk)
        cov = pc.get_covariance()
        exp_var = pc.explained_variance_ratio_
        COV.append(cov)
        EXP_VAR.append(exp_var)
        ID.append(id)
    ID = np.array(ID)
    COV = np.array(COV)
    EXP_VAR = np.array(EXP_VAR)
    COV = np.moveaxis(COV, [0, 1, 2], [2, 0, 1])
    EXP_VAR = EXP_VAR.T
    var_labels = ['Mx', 'My', 'Mz', 'Fx', 'Fy', 'Fz', 'TH', 'PHI']
    np.savez(os.path.join(p_save, 'cov_exp_var.npz'),
             cov=COV,
             exp_var=EXP_VAR,
             id=ID,
             var_labels=var_labels)
    print('Saved PCA descriptions!')
    return None
def get_adaptation_df(p_load, max_t=20):
    '''
    Returns a dataframe that has the firing rate for the first N (default=20) ms
    for each cell and direction. Should be useful for calculating an
    'adaptation' parameter.

    :param p_load: path to where all the neo files exist
    :param max_t: maximum time in milliseconds over which to grab the firing rate
    :return: a dataframe with the onset firing rate for every cell and direction
    '''
    df_all = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df = pd.DataFrame()
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            PSTH, edges, _, med_angle = get_PSTH_by_dir(blk, unit_num,
                                                        norm_dur=False,
                                                        binsize=1 * pq.ms)
            if np.isscalar(PSTH) and PSTH == -1:  # failure sentinel
                continue
            for ii in range(len(PSTH)):
                df_dir = pd.DataFrame()
                df_dir['dir_idx'] = np.repeat(ii, max_t)
                df_dir['time'] = edges[ii][:max_t]
                df_dir['rate'] = PSTH[ii][:max_t]
                df_dir['med_angle'] = med_angle[ii]
                df = df.append(df_dir)
            df['id'] = id
            df_all = df_all.append(df)
    return df_all
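# A minimal sketch (not part of the original pipeline) of reducing the output
# of get_adaptation_df() to one adaptation number per cell and direction: one
# minus the ratio of late to early mean rate within the onset window. The
# 50/50 split of the window is an arbitrary illustrative choice, and 'time'
# is assumed to be in ms as returned above.
def adaptation_index(df_all, max_t=20):
    early = df_all[df_all.time < max_t // 2].groupby(
        ['id', 'dir_idx'])['rate'].mean()
    late = df_all[df_all.time >= max_t // 2].groupby(
        ['id', 'dir_idx'])['rate'].mean()
    return 1. - late / early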
def get_threshold_index(p_load):
    '''
    Return a dataframe with a boolean telling you whether a particular contact
    elicited a spike, for each cell.

    :param p_load: path to the neo files
    :return: a pandas dataframe with all the contacts for all cells. Cannot be
        reshaped, since every whisker has a different number of contacts.
    '''
    df_all = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            df = pd.DataFrame()
            id = neoUtils.get_root(blk, unit_num)
            print('working on {}'.format(id))
            trains = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)[-1]
            dir_idx, med_dir = worldGeometry.get_contact_direction(
                blk, plot_tgl=False)
            if np.isscalar(dir_idx) and dir_idx == -1:  # failure sentinel
                continue
            dir_map = {key: value for (key, value) in enumerate(med_dir)}
            df['id'] = [id for x in range(len(trains))]
            df['did_spike'] = [len(x) > 0 for x in trains]
            df['dir_idx'] = dir_idx
            df['med_dir'] = df['dir_idx'].map(dir_map)
            df_all = df_all.append(df)
    return df_all
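# A hypothetical downstream use of get_threshold_index(): the probability that
# a contact elicits at least one spike, per cell and deflection direction.
def spike_probability_by_dir(p_load):
    df = get_threshold_index(p_load)
    # mean of a boolean column is the fraction of spiking contacts
    return df.groupby(['id', 'med_dir'])['did_spike'].mean()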
def run_model(fname, p_smooth, unit_num, savepath, param_dict):
    X, y, cbool = get_X_y(fname, p_smooth, unit_num)
    blk = neoUtils.get_blk(fname)
    root = neoUtils.get_root(blk, unit_num)
    savefile = os.path.join(savepath, '{}_tensorflow.ckpt'.format(root))
    # zero out non-contact samples so they do not contribute to the fit
    X[np.invert(cbool), :] = 0
    y[np.invert(cbool), :] = 0
    Xb = X_to_pillow(X[:, :8])
    print(param_dict)
    build_GLM_model(Xb, y, savefile, **param_dict)
def plot_joint_spaces(p_load, p_save):
    for f in glob.glob(os.path.join(p_load, 'rat*.h5')):
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)
        for unit in blk.channel_indexes[-1].units:
            unit_num = int(unit.name[-1])
            try:
                mymz_space(blk, unit_num, p_save=p_save, save_tgl=True,
                           im_ext='png', dpi_res=300)
                phase_plots(blk, unit_num, p_save=p_save, save_tgl=True,
                            im_ext='png', dpi_res=300)
            except Exception:
                print('File {} did not create jointplots'.format(
                    neoUtils.get_root(blk, unit_num)))
def get_radial_distance_group(blk, plot_tgl=False):
    S = neoUtils.get_var(blk, 'S')
    use_flags = neoUtils.concatenate_epochs(blk, -1)
    S_contacts = neoUtils.get_analog_contact_slices(S, use_flags)
    S_med = np.nanmedian(S_contacts, axis=0)
    mask = np.isfinite(S_med).ravel()
    S_med_masked = S_med[mask]
    if len(S_med_masked) < 10:
        return -2  # too few contacts to cluster
    # fit 2- and 3-component mixtures and keep whichever has the lower AIC
    clf3 = mixture.GaussianMixture(n_components=3, n_init=100)
    clf2 = mixture.GaussianMixture(n_components=2, n_init=100)
    clf3.fit(S_med_masked)
    clf2.fit(S_med_masked)
    if clf2.aic(S_med_masked) < clf3.aic(S_med_masked):
        n_clusts = 2
        idx = clf2.predict(S_med_masked)
    else:
        n_clusts = 3
        idx = clf3.predict(S_med_masked)
    # reorder the cluster labels by median arclength (0 = most proximal)
    S_clusts = []
    for ii in range(n_clusts):
        S_clusts.append(np.nanmedian(S_med_masked[idx == ii]))
    ordering = np.argsort(S_clusts)
    idx = np.array([np.where(x == ordering)[0][0] for x in idx])
    S_clusts.sort()
    if np.any(np.isnan(S_clusts)):
        return -1  # a cluster is empty; signal failure
    idx_out = np.zeros(S_med.shape[0], dtype='int')
    idx_out[mask] = idx
    bin_edges = np.histogram(S_med_masked, 50)[1][:-1]
    if plot_tgl:
        sns.set_style('ticks')
        if n_clusts == 2:
            cc = plotVG3D.arclength_group_colors()[0::2]
        else:
            cc = plotVG3D.arclength_group_colors()
        for ii in range(n_clusts):
            sns.distplot(S_med_masked[idx == ii], bins=bin_edges,
                         color=cc[ii], kde=False)
        ax = plt.gca()
        ax.set_ylabel('Number of contacts')
        ax.set_xlabel('Arclength at contact (m)')
        ax.grid('off', axis='x')
        ax.set_title('{}'.format(neoUtils.get_root(blk, 0)))
        sns.despine()
    return idx_out
def main(fname, p_smooth, nfilts=3, pca_tgl=False):
    print('loading in {}'.format(fname))
    blk = neoUtils.get_blk(fname)
    save_dir = os.path.split(fname)[0]
    num_units = len(blk.channel_indexes[-1].units)
    for unit_num in range(num_units):
        id = neoUtils.get_root(blk, unit_num)
        save_file = os.path.join(save_dir, '{}_IF_model.npz'.format(id))
        if os.path.isfile(save_file):
            print('File already found. Aborting...')
            break
        X, y, cbool = get_X_y(fname, p_smooth, unit_num=unit_num)
        if pca_tgl:
            # optionally project the design matrix onto its first 8 PCs
            npcs = 8
            X_pca = np.zeros([X.shape[0], npcs])
            pca = sklearn.decomposition.PCA()
            X_pca[cbool, :] = pca.fit_transform(X[cbool, :])[:, :npcs]
            X = X_pca.copy()
        free_params = init_free_params(X, nfilts)
        free_params = convert_free_params(free_params, X, nfilts,
                                          out_type='list')
        free_params = np.array(free_params)
        const_params = init_constrained_params()
        # algo = nlopt.LN_SBPLX
        algo = nlopt.LN_COBYLA
        # algo = nlopt.LN_NELDERMEAD
        opt = nlopt.opt(algo, free_params.shape[0])
        opt.set_min_objective(
            lambda free_params, grad: optim_func(free_params, X, y, nfilts,
                                                 const_params, cbool))
        lb, ub = nlopt_bounds(free_params)
        # opt.set_lower_bounds(lb)
        # opt.set_upper_bounds(ub)
        dx = np.abs(free_params) / 50.
        opt.set_initial_step(dx)
        xopt = opt.optimize(free_params)
        np.savez(save_file,
                 X=X,
                 y=y,
                 cbool=cbool,
                 opt=opt,
                 xopt=xopt,
                 free_params=free_params,
                 const_params=const_params,
                 nfilts=nfilts)
def ISI_by_deflection(blk, unit_num=0):
    unit = blk.channel_indexes[-1].units[unit_num]
    ISI = spikeAnalysis.get_contact_sliced_trains(blk, unit)[1]
    CV, LV = spikeAnalysis.get_CV_LV(ISI)
    mean_ISI = np.array([np.mean(x) for x in ISI])
    idx, med_angle = worldGeometry.get_contact_direction(blk, plot_tgl=False)
    df = pd.DataFrame()
    df['id'] = [neoUtils.get_root(blk, unit_num) for x in range(len(ISI))]
    df['mean_ISI'] = mean_ISI
    df['CV'] = CV
    df['LV'] = LV
    df['dir_idx'] = idx
    df['med_dir'] = [med_angle[x] for x in idx]
    return df
def get_onset_and_duration_spikes(p_load, dur=10 * pq.ms):
    """
    Loop through all the data we have and get the number of spikes during an
    onset window, the total number of spikes during the contact, and the
    length of the contact. This allows us to calculate how much of the spiking
    occurs in the onset interval.

    :param p_load: directory where the h5 files live
    :param dur: a python quantity that defines the 'onset' epoch
    :return: a dataframe with a summary of the relevant data
    """
    df_all = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            _, _, trains = spikeAnalysis.get_contact_sliced_trains(
                blk, unit_num)
            dir_idx, med_angle = worldGeometry.get_contact_direction(
                blk, plot_tgl=False)
            directions = []
            full = []
            contact_duration = []
            onset = []
            for train, direction in zip(trains, dir_idx):
                onset.append(len(train.time_slice(train.t_start,
                                                  train.t_start + dur)))
                full.append(len(train))
                directions.append(direction)
                contact_duration.append(float(train.t_stop - train.t_start))
            df_dir = pd.DataFrame()
            df_dir['dir_idx'] = directions
            df_dir['time'] = contact_duration
            df_dir['total_spikes'] = full
            df_dir['onset_spikes'] = onset
            df_dir['med_angle'] = [med_angle[x] for x in df_dir.dir_idx]
            df_dir['id'] = id
            df_all = df_all.append(df_dir)
    df_all['onset_period'] = dur
    return df_all
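# A minimal sketch (not part of the original pipeline) reducing the output of
# get_onset_and_duration_spikes() to an 'onset fraction': the share of each
# contact's spikes that fall within the onset window, averaged per cell and
# direction.
def onset_fraction(df_all):
    df = df_all[df_all.total_spikes > 0].copy()  # avoid division by zero
    df['onset_frac'] = df['onset_spikes'] / df['total_spikes']
    return df.groupby(['id', 'dir_idx'])['onset_frac'].mean()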
def calc_MSE(fname, p_smooth, unit_num):
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
    varlist = ['M', 'F', 'TH', 'PHIE']
    root = neoUtils.get_root(blk, unit_num)
    print('Working on {}'.format(root))
    Xdot = GLM.get_deriv(blk, blk_smooth, varlist)[0]
    # reshape to [time, 8 stimulus components, 10 smoothing windows]
    Xdot = np.reshape(Xdot, [-1, 8, 10])
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    cbool = neoUtils.get_Cbool(blk)
    mse = []
    for ii in range(Xdot.shape[1]):
        var_in = Xdot[:, ii, :].copy()
        mse.append(tuning_curve_MSE(var_in, sp, cbool, bins=50))
    return mse
def smoothed_best():
    df = pd.read_csv(min_entropy, index_col='id')
    smooth_vals = np.arange(5, 100, 10).tolist()
    # the modal best smoothing value across stimulus components, per cell
    best_smooth = df.mode(axis=1)[0]
    best_idx = [smooth_vals.index(x) for x in best_smooth]
    best_idx = pd.DataFrame({'idx': best_idx}, index=best_smooth.index)
    for f in glob.glob(os.path.join(p_load, '*NEO.h5')):
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                if root not in best_idx.index:
                    print('{} not found in best smoothing derivative data'.
                          format(root))
                    continue
                outname = os.path.join(
                    p_save,
                    'best_smoothing_deriv\\{}_best_smooth_pillowX.mat'.format(
                        root))
                X = GLM.create_design_matrix(blk, varlist)
                smoothing_to_use = best_idx.loc[root][0]
                Xdot = GLM.get_deriv(blk, blk_smooth, varlist,
                                     smoothing=[smoothing_to_use])[0]
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk, unit_num)
                sio.savemat(outname, {'X': X,
                                      'y': y,
                                      'cbool': cbool,
                                      'smooth': best_smooth.loc[root],
                                      'arclengths': arclengths})
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
def get_first_spike_vals(fname, p_smooth, unit_num):
    """
    Return a dataframe of length Ncontacts with the value of the relevant
    stimulus features at the time of the first spike of each contact.

    :param fname: filename of the neo block
    :param p_smooth: path to the smoothed-variable blocks
    :param unit_num: int
    :return: pandas dataframe
    """
    # get the blocks
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
    # get the trains and times of first spikes
    _, _, trains = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)
    t_idx = [train[0].magnitude if len(train) > 0 else np.nan
             for train in trains]
    t_idx = np.array(t_idx)
    # spike times in ms double as sample indices (1 kHz sampling)
    t_idx = t_idx[np.isfinite(t_idx)].astype('int')
    # get the stimuli
    varlist = ['M', 'F', 'TH', 'PHIE']
    X = GLM.create_design_matrix(blk, varlist)
    Xsmooth = GLM.get_deriv(blk, blk_smooth, varlist, smoothing=[9])[1]
    # combine component pairs into bending-magnitude signals
    MB = np.sqrt(X[:, 1]**2 + X[:, 2]**2)[:, np.newaxis]
    FB = np.sqrt(X[:, 4]**2 + X[:, 5]**2)[:, np.newaxis]
    RB = np.sqrt(X[:, 6]**2 + X[:, 7]**2)[:, np.newaxis]
    # use the smoothed signals to calculate the derivative
    MBsmooth = np.sqrt(Xsmooth[:, 1]**2 + Xsmooth[:, 2]**2)[:, np.newaxis]
    FBsmooth = np.sqrt(Xsmooth[:, 4]**2 + Xsmooth[:, 5]**2)[:, np.newaxis]
    RBsmooth = np.sqrt(Xsmooth[:, 6]**2 + Xsmooth[:, 7]**2)[:, np.newaxis]
    X = np.concatenate([MB, FB, RB], axis=1)
    Xsmooth = np.concatenate([MBsmooth, FBsmooth, RBsmooth], axis=1)
    Xdot = np.diff(np.concatenate([np.zeros([1, 3]), Xsmooth]), axis=0)
    X = np.concatenate([X, Xdot], axis=1)
    # extract the stimulus at the time of first spike and output to a dataframe
    vals = X[t_idx]
    vallist = ['MB', 'FB', 'RB', 'MBdot', 'FBdot', 'RBdot']
    df = pd.DataFrame()
    for ii in range(len(vallist)):
        df[vallist[ii]] = vals[:, ii]
    df['id'] = neoUtils.get_root(blk, unit_num)
    return df
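# A hypothetical driver for get_first_spike_vals(): pool the first-spike
# stimulus values across every unit in one file into a single dataframe.
def batch_first_spike_vals(fname, p_smooth):
    blk = neoUtils.get_blk(fname)
    num_units = len(blk.channel_indexes[-1].units)
    return pd.concat([get_first_spike_vals(fname, p_smooth, unit_num)
                      for unit_num in range(num_units)])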
def mymz_space(blk, unit_num, bin_stretch=False, save_tgl=False, p_save=None,
               im_ext='png', dpi_res=300):
    root = neoUtils.get_root(blk, unit_num)
    use_flags = neoUtils.get_Cbool(blk)
    M = neoUtils.get_var(blk).magnitude
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    idx = np.all(np.isfinite(M), axis=1)
    if bin_stretch:
        MY = np.empty(M.shape[0])
        MZ = np.empty(M.shape[0])
        MY[idx], logit_y = nl(M[idx, 1], 90)
        MZ[idx], logit_z = nl(M[idx, 2], 90)
    else:
        MY = M[:, 1] * 1e-6
        MZ = M[:, 2] * 1e-6
    response, var1_edges, var2_edges = varTuning.joint_response_hist(
        MY, MZ, sp, use_flags, bins=100, min_obs=15)
    if bin_stretch:
        var1_edges = logit_y(var1_edges)
        var2_edges = logit_z(var2_edges)
    ax = varTuning.plot_joint_response(response, var1_edges, var2_edges,
                                       contour=False)
    ax.axvline(color='k', linewidth=1)
    ax.axhline(color='k', linewidth=1)
    ax.patch.set_color([0.6, 0.6, 0.6])
    mask = np.invert(response.mask)
    if not mask.all():
        ax.set_ylim(var2_edges[np.where(mask)[0].min()],
                    var2_edges[np.where(mask)[0].max()])
        ax.set_xlim(var1_edges[np.where(mask)[1].min()],
                    var1_edges[np.where(mask)[1].max()])
    ax.set_xlabel(r'M$_y$ ($\mu$N-m)')
    ax.set_ylabel(r'M$_z$ ($\mu$N-m)')
    plt.draw()
    plt.tight_layout()
    if save_tgl:
        if p_save is None:
            raise ValueError('figure save location is required')
        plt.savefig(os.path.join(p_save,
                                 '{}_mymz.{}'.format(root, im_ext)),
                    dpi=dpi_res)
        plt.close('all')
def smoothed_mechanics():
    """
    Grab the data from the smoothed mechanics and the derivative of the same,
    and save a design matrix per unit to a .mat file.
    """
    f_arclength = '/projects/p30144/_VG3D/deflections/direction_arclength_FR_group_data.csv'
    f_list = glob.glob(os.path.join(p_load, '*NEO.h5'))
    f_list.sort()
    for f in f_list:
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                outname = os.path.join(
                    p_save, '{}_smooth_mechanicsX.mat'.format(root))
                # smoothing index 5 corresponds to the 55 ms kernel
                Xdot, X = GLM.get_deriv(blk, blk_smooth, varlist,
                                        smoothing=[5])
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk, unit_num,
                                                fname=f_arclength)
                sio.savemat(outname, {'X': X,
                                      'y': y,
                                      'cbool': cbool,
                                      'smooth': 55,
                                      'arclengths': arclengths})
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
def MB_curve(blk, unit_num, save_tgl=False, im_ext='svg', dpi_res=300):
    root = neoUtils.get_root(blk, unit_num)
    M = neoUtils.get_var(blk)
    use_flags = neoUtils.get_Cbool(blk)
    MB = mechanics.get_MB_MD(M)[0].magnitude.ravel()
    MB[np.invert(use_flags)] = 0
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
    MB_bayes, edges = varTuning.stim_response_hist(MB * 1e6, r, use_flags,
                                                   nbins=100, min_obs=5)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(edges[:-1], MB_bayes, 'o', color='k')
    ax.set_ylabel('Spike Rate (sp/s)')
    ax.set_xlabel(r'Bending Moment ($\mu$N-m)')
    plt.tight_layout()
    if save_tgl:
        plt.savefig('./figs/{}_MB_tuning.{}'.format(root, im_ext),
                    dpi=dpi_res)
        plt.close('all')
def get_arclength_bool(blk, unit_num, fname=None):
    # fname is the name of the csv file with arclength groupings
    if fname is None:
        if 'BOX_PATH' in os.environ:
            fname = os.path.join(
                os.environ['BOX_PATH'],
                r'__VG3D\_deflection_trials\_NEO\results\direction_arclength_FR_group_data.csv')
        else:
            fname = '/projects/p30144/_VG3D/deflections/direction_arclength_FR_group_data.csv'
    df = pd.read_csv(fname)
    id = neoUtils.get_root(blk, unit_num)
    sub_df = df[df.id == id]
    arclength_list = sub_df.Arclength.tolist()
    use_flags = neoUtils.concatenate_epochs(blk)
    if len(sub_df) != len(use_flags):
        raise ValueError(
            'The number of contacts in the block {} does not match the number of contacts in the csv {}'.format(
                len(use_flags), len(sub_df)))
    cbool = neoUtils.get_Cbool(blk)
    distal_cbool = np.zeros_like(cbool)
    medial_cbool = np.zeros_like(cbool)
    proximal_cbool = np.zeros_like(cbool)
    # loop through each contact and set the appropriate arclength boolean
    for ii in range(len(use_flags)):
        start = use_flags[ii].magnitude.astype('int')
        dur = use_flags.durations[ii].magnitude.astype('int')
        if arclength_list[ii] == 'Proximal':
            proximal_cbool[start:start + dur] = 1
        elif arclength_list[ii] == 'Distal':
            distal_cbool[start:start + dur] = 1
        elif arclength_list[ii] == 'Medial':
            medial_cbool[start:start + dur] = 1
    arclengths = {'Distal': distal_cbool,
                  'Medial': medial_cbool,
                  'Proximal': proximal_cbool}
    return arclengths
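# A minimal sketch (hypothetical, not in the original pipeline) of using the
# arclength booleans: the mean smoothed firing rate of one unit within each
# arclength group.
def rate_by_arclength(blk, unit_num):
    r = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)[0]
    arclengths = get_arclength_bool(blk, unit_num)
    return {group: np.nanmean(r[cbool.astype('bool')])
            for group, cbool in arclengths.items()}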
def FX_plots(blk, unit_num, save_tgl=False, im_ext='svg', dpi_res=300):
    root = neoUtils.get_root(blk, unit_num)
    F = neoUtils.get_var(blk, 'F')
    Fx = F.magnitude[:, 0]
    use_flags = neoUtils.get_Cbool(blk)
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
    Fx[np.invert(use_flags)] = 0
    Fx_bayes, edges = varTuning.stim_response_hist(Fx * 1e6, r, use_flags,
                                                   nbins=50, min_obs=5)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(edges[:-1], Fx_bayes * 1000, 'o', color='k')
    ax.set_ylabel('Spike Rate (sp/s)')
    ax.set_xlabel(r'Axial Force ($\mu$N)')
    plt.tight_layout()
    if save_tgl:
        plt.savefig('./figs/{}_Fx_tuning.{}'.format(root, im_ext),
                    dpi=dpi_res)
        plt.close('all')
def main(fname, p_smooth, p_save):
    """
    Run the multi-filter GLM on a given file.

    :param fname: path to the neo h5 file
    :param p_smooth: path to the smoothed-variable blocks
    :param p_save: directory in which to save the output
    :return: saves a numpy file to p_save
    """
    param_dict = {'family': 'p',
                  'hist': True,
                  'nfilts': 3,
                  'learning_rate': 3e-4,
                  'batch_size': 4096,
                  'epochs': 5000,
                  'min_delta': 0.01,
                  'patience': 8}
    nsims = 100
    blk = neoUtils.get_blk(fname)
    num_units = len(blk.channel_indexes[-1].units)
    for unit_num in range(num_units):
        X, y, cbool = get_X_y(fname, p_smooth, unit_num)
        root = neoUtils.get_root(blk, unit_num)
        model_fname = os.path.join(p_save, '{}_tensorflow.ckpt'.format(root))
        X[np.invert(cbool), :] = 0
        y[np.invert(cbool), :] = 0
        # Train
        build_GLM_model(X, y, model_fname, **param_dict)
        # Simulate
        output = simulate(X, y, model_fname, cbool, nsims)
        print('Saving...')
        np.savez(os.path.join(p_save, '{}_multi_filter.npz'.format(root)),
                 X=X,
                 y=y,
                 cbool=cbool,
                 model_out=output,
                 param_dict=param_dict)
        print('Saved')
def create_threshold_DF(blk, unit_num=0, time_win=20, max_spikes=3):
    # If we use a constant time window then we are not looking at the
    # magnitude but the derivative, which could be what we want...
    use_flags = neoUtils.concatenate_epochs(blk)
    id = neoUtils.get_root(blk, unit_num)
    if len(use_flags) < 10:
        print('{} has too few contacts'.format(id))
        return -1
    onset, offset = neoUtils.get_contact_apex_idx(blk, mode='time_win',
                                                  time_win=time_win)
    all_var_mag = np.empty([len(onset), 0])
    for varname in ['M', 'F', 'TH', 'PHIE']:
        var = neoUtils.get_var(blk, varname)
        if varname in ['TH', 'PHIE']:
            var = neoUtils.center_var(var, use_flags)
        var_sliced = neoUtils.get_analog_contact_slices(var, use_flags)
        var_onset = worldGeometry.get_onset(var_sliced, onset, to_array=False)
        # value of each variable at the end of the onset window
        var_mag = np.array([x[-1] if len(x) > 0
                            else np.zeros(var_sliced.shape[2])
                            for x in var_onset])
        all_var_mag = np.concatenate([all_var_mag, var_mag], axis=1)
    c_idx = np.empty(var_sliced.shape[1], dtype='f8')
    c_idx[:] = np.nan
    for n_spikes in range(max_spikes):
        temp_idx = spikeAnalysis.get_onset_contacts(blk, onset,
                                                    num_spikes=n_spikes)
        c_idx[temp_idx] = n_spikes
    X = np.concatenate([all_var_mag, c_idx[:, np.newaxis]], axis=1)
    df = pd.DataFrame(X)
    df = df.rename(columns={0: 'Mx', 1: 'My', 2: 'Mz',
                            3: 'Fx', 4: 'Fy', 5: 'Fz',
                            6: 'TH', 7: 'PHI', 8: 'n_spikes'})
    dir_idx, med_dir = worldGeometry.get_contact_direction(blk, False)
    df['dir_idx'] = dir_idx
    df['med_dir'] = df.dir_idx.map({x: med_dir[x]
                                    for x in range(len(med_dir))})
    df['id'] = id
    df['time_win'] = time_win
    return df
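# A hypothetical follow-up to create_threshold_DF(): the median onset bending
# moment magnitude as a function of how many spikes the onset elicited. The
# grouping is illustrative, not part of the original analysis.
def threshold_summary(df):
    df = df.dropna(subset=['n_spikes']).copy()
    df['MB_onset'] = np.sqrt(df['My']**2 + df['Mz']**2)
    return df.groupby('n_spikes')['MB_onset'].median()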
def get_components(fname, p_smooth=None, smooth_idx=9):
    '''Get the PCA components given a filename.'''
    varlist = ['M', 'F', 'TH', 'PHIE']
    blk = neoUtils.get_blk(fname)
    cbool = neoUtils.get_Cbool(blk)
    root = neoUtils.get_root(blk, 0)[:-2]
    X = GLM.create_design_matrix(blk, varlist)
    if p_smooth is not None:
        blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
        Xdot = GLM.get_deriv(blk, blk_smooth, varlist,
                             smoothing=[smooth_idx])[0]
        X = np.concatenate([X, Xdot], axis=1)
    X[np.invert(cbool), :] = 0
    X = neoUtils.replace_NaNs(X, 'pchip')
    X = neoUtils.replace_NaNs(X, 'interp')
    # scale (but do not center) so PCA is not dominated by variable units
    scaler = sklearn.preprocessing.StandardScaler(with_mean=False)
    X[cbool, :] = scaler.fit_transform(X[cbool, :])
    pca = sklearn.decomposition.PCA()
    pca.fit(X[cbool, :])
    return (pca, root)
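# A minimal usage sketch (hypothetical) for get_components(): the cumulative
# variance explained by the first k principal components of one whisker's
# design matrix.
def cumulative_explained_variance(fname, p_smooth=None, k=3):
    pca, root = get_components(fname, p_smooth)
    return root, np.cumsum(pca.explained_variance_ratio_)[:k]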
def batch_peak_PSTH_time(p_load, p_save):
    df = pd.DataFrame()
    for f in glob.glob(os.path.join(p_load, '*.h5')):
        blk = neoUtils.get_blk(f)
        print('Working on {}'.format(os.path.basename(f)))
        num_units = len(blk.channel_indexes[-1].units)
        # _, med_dir = worldGeometry.get_contact_direction(blk, plot_tgl=False)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            PSTH, t_edges, max_fr, med_dir = get_PSTH_by_dir(blk, unit_num)
            if np.isscalar(PSTH) and PSTH == -1:  # failure sentinel
                continue
            peak_time = [t_edges[x][np.nanargmax(PSTH[x])]
                         for x in range(len(PSTH))]
            df_temp = pd.DataFrame()
            df_temp['id'] = [id for x in range(len(med_dir))]
            df_temp['med_dir'] = med_dir
            df_temp['peak_time'] = peak_time
            df = df.append(df_temp)
    df.to_csv(os.path.join(p_save, 'peak_PSTH_time.csv'))
    print('done')
def anova_analysis(blk, unit_num=0):
    use_flags = neoUtils.concatenate_epochs(blk)
    root = neoUtils.get_root(blk, unit_num)
    idx_dir, med_dir = worldGeometry.get_contact_direction(blk,
                                                           plot_tgl=False)
    FR = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)[0].magnitude
    idx_S = worldGeometry.get_radial_distance_group(blk, plot_tgl=False)
    # create arclength groups (-1 and -2 are failure sentinels)
    if np.isscalar(idx_S) and idx_S == -1:
        print('Only one arclength group')
        arclength_labels = ['Proximal']
        idx_S = np.zeros_like(idx_dir)  # treat all contacts as one group
    elif np.isscalar(idx_S) and idx_S == -2:
        print('Too few contacts')
        return (-1, -1)
    elif np.max(idx_S) == 2:
        arclength_labels = ['Proximal', 'Medial', 'Distal']
    elif np.max(idx_S) == 1:
        arclength_labels = ['Proximal', 'Distal']
    idx_S = [arclength_labels[x] for x in idx_S]
    df = pd.DataFrame()
    directions = pd.DataFrame()
    df['Firing_Rate'] = FR
    df['Arclength'] = idx_S
    df['Direction'] = idx_dir
    df['id'] = root
    directions['med_dir'] = med_dir
    directions['Direction'] = np.arange(len(med_dir))  # group numbers 0..N-1
    df = df.merge(directions)
    df = df.dropna()
    formula = 'Firing_Rate ~ C(Direction) + C(Arclength) + C(Arclength):C(Direction)'
    model = ols(formula, df, missing='drop').fit()
    aov_table = anova_lm(model, typ=1)
    aov_table['id'] = root
    return df, aov_table
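# A hypothetical driver for anova_analysis(): collect the p-values of the
# direction, arclength, and interaction terms for every unit in one block.
def anova_pvals(blk):
    out = {}
    for unit_num in range(len(blk.channel_indexes[-1].units)):
        df, aov = anova_analysis(blk, unit_num)
        if np.isscalar(aov) and aov == -1:  # unit was skipped
            continue
        out[neoUtils.get_root(blk, unit_num)] = aov['PR(>F)']
    return out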
def calc_corr(fname, p_smooth, unit_num):
    blk = neoUtils.get_blk(fname)
    blk_smooth = GLM.get_blk_smooth(fname, p_smooth)
    varlist = ['M', 'F', 'TH', 'PHIE']
    component_list = ['{}_dot'.format(x) for x in
                      ['Mx', 'My', 'Mz', 'Fx', 'Fy', 'Fz', 'TH', 'PHI']]
    root = neoUtils.get_root(blk, unit_num)
    Xdot = GLM.get_deriv(blk, blk_smooth, varlist)[0]
    # reshape to [time, 8 stimulus components, 10 smoothing windows]
    Xdot = np.reshape(Xdot, [-1, 8, 10])
    windows = np.arange(5, 100, 10)
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    cbool = neoUtils.get_Cbool(blk)
    R = []
    # loop over variables
    for ii in range(Xdot.shape[1]):
        var_in = Xdot[:, ii, :].copy()
        # loop over smoothing windows
        r = []
        for jj in range(var_in.shape[1]):
            kernel = elephant.kernels.GaussianKernel(pq.ms * windows[jj])
            FR = elephant.statistics.instantaneous_rate(sp, pq.ms,
                                                        kernel=kernel)
            idx = np.isfinite(var_in[:, jj])
            r.append(np.corrcoef(var_in[:, jj].ravel()[idx],
                                 FR.magnitude.ravel()[idx])[0, 1])
        R.append(r)
    R = np.array(R)
    df = pd.DataFrame(data=R, columns=['{}ms'.format(x) for x in windows])
    df.index = component_list
    return df
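# A small sketch (hypothetical) of reducing the correlation table from
# calc_corr() to a preferred smoothing window per derivative component: the
# column label (e.g. '55ms') with the largest absolute correlation.
def best_window(df):
    return df.abs().idxmax(axis=1)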
def get_corr_with_FR():
    p_load = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO')
    p_save = os.path.join(os.environ['BOX_PATH'],
                          r'__VG3D\_deflection_trials\_NEO\results')
    p_smooth = r'K:\VG3D\_rerun_with_pad\_deflection_trials\_NEO\smooth'
    DF = pd.DataFrame()
    for ii, f in enumerate(glob.glob(os.path.join(p_load, '*.h5'))):
        if ii == 0:  # skip the first file
            continue
        blk = neoUtils.get_blk(f)
        num_units = len(blk.channel_indexes[-1].units)
        for unit_num in range(num_units):
            id = neoUtils.get_root(blk, unit_num)
            print('Working on {}'.format(id))
            try:
                df = calc_corr(f, p_smooth, unit_num)
                df['id'] = id
                DF = DF.append(df)
            except Exception:
                print('Problem on {}'.format(id))
    DF.to_csv(os.path.join(p_save, 'derivative_corr_by_smoothing.csv'),
              index=True)
def smoothed(smooth_idx=9):
    smooth_vals = np.arange(5, 100, 10)
    sub_p_save = os.path.join(
        p_save, '{}ms_smoothing_deriv'.format(smooth_vals[smooth_idx]))
    if not os.path.isdir(sub_p_save):
        os.mkdir(sub_p_save)
    for f in glob.glob(os.path.join(p_load, '*NEO.h5')):
        try:
            blk = neoUtils.get_blk(f)
            blk_smooth = GLM.get_blk_smooth(f, p_smooth)
            num_units = len(blk.channel_indexes[-1].units)
            for unit_num in range(num_units):
                varlist = ['M', 'F', 'TH', 'PHIE']
                root = neoUtils.get_root(blk, unit_num)
                print('Working on {}'.format(root))
                outname = os.path.join(
                    sub_p_save,
                    '{}ms_{}_pillowX.mat'.format(smooth_vals[smooth_idx],
                                                 root))
                X = GLM.create_design_matrix(blk, varlist)
                Xdot = GLM.get_deriv(blk, blk_smooth, varlist,
                                     [smooth_idx])[0]
                X = np.concatenate([X, Xdot], axis=1)
                y = neoUtils.get_rate_b(blk, unit_num)[1]
                cbool = neoUtils.get_Cbool(blk)
                arclengths = get_arclength_bool(blk, unit_num)
                sio.savemat(outname, {'X': X,
                                      'y': y,
                                      'cbool': cbool,
                                      'arclengths': arclengths})
        except Exception as ex:
            print('Problem with {}:{}'.format(os.path.basename(f), ex))
def onset_tuning(blk, unit_num=0, use_zeros=True):
    '''
    Calculate the onset velocity in terms of both CP and rotation, and
    calculate the relationship between the onset firing rate and the
    different velocities.

    :param blk:
    :param unit_num:
    :param use_zeros:
    :return fits_all, fits_direction, df:
    '''
    use_flags = neoUtils.concatenate_epochs(blk)
    trains = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)[-1]
    apex = neoUtils.get_contact_apex_idx(blk) * pq.ms
    apex_idx = apex.magnitude.astype('int')
    id = neoUtils.get_root(blk, unit_num)
    # get MB and FB at apex
    M = neoUtils.get_var(blk)
    MB = neoUtils.get_MB_MD(M)[0]
    MB_contacts = neoUtils.get_analog_contact_slices(MB, use_flags)
    MB_apex = neoUtils.get_value_at_idx(MB_contacts, apex_idx).squeeze()
    MB_dot = MB_apex / apex
    F = neoUtils.get_var(blk, 'F')
    FB = neoUtils.get_MB_MD(F)[0]
    FB_contacts = neoUtils.get_analog_contact_slices(FB, use_flags)
    FB_apex = neoUtils.get_value_at_idx(FB_contacts, apex_idx).squeeze()
    FB_dot = FB_apex / apex
    # get onset FR
    onset_counts = np.array([len(train.time_slice(train.t_start,
                                                  train.t_start + dur))
                             for train, dur in zip(trains, apex)])
    onset_FR = np.divide(onset_counts, apex)
    onset_FR.units = 1 / pq.s
    # get onset rotation velocity
    V_rot, _, D = worldGeometry.get_onset_velocity(blk)
    dir_idx, dir_angle = worldGeometry.get_contact_direction(blk, False)
    if np.isscalar(dir_idx) and dir_idx == -1:  # failure sentinel
        return (-1, -1, -1)
    df = pd.DataFrame()
    df['id'] = [id for x in range(MB_dot.shape[0])]
    df['MB'] = MB_apex
    df['MB_dot'] = MB_dot
    df['FB_dot'] = FB_dot
    df['FB'] = FB_apex
    df['rot'] = D
    df['rot_dot'] = V_rot
    df['dir_idx'] = dir_idx
    df['FR'] = onset_FR
    df['dir_angle'] = [dir_angle[x] for x in dir_idx]
    df = df.replace(np.inf, np.nan)
    df = df.dropna()
    # FIT: regress FR against each onset variable, overall and by direction
    fits_all = pd.DataFrame(
        columns=['id', 'var', 'rvalue', 'pvalue', 'slope', 'intercept'])
    fits_direction = pd.DataFrame()
    idx = 0
    idx2 = 0
    for var in ['MB', 'MB_dot', 'FB', 'FB_dot', 'rot', 'rot_dot']:
        fit = stats.linregress(df[var], df['FR'])._asdict()
        fits_all.loc[idx, 'id'] = id
        fits_all.loc[idx, 'var'] = var
        for k, v in fit.items():
            fits_all.loc[idx, k] = v
        idx += 1
        for direction in range(np.max(dir_idx) + 1):
            temp_idx = df['dir_idx'] == direction
            if not np.any(temp_idx):
                continue
            fit = stats.linregress(df[var][temp_idx],
                                   df['FR'][temp_idx])._asdict()
            fits_direction.loc[idx2, 'id'] = id
            fits_direction.loc[idx2, 'var'] = var
            fits_direction.loc[idx2, 'dir_idx'] = direction
            fits_direction.loc[idx2, 'med_dir'] = dir_angle[direction]
            for k, v in fit.items():
                fits_direction.loc[idx2, k] = v
            idx2 += 1
    return (fits_all, fits_direction, df)
def phase_plots(blk, unit_num, save_tgl=False, bin_stretch=False, p_save=None,
                im_ext='png', dpi_res=300):
    '''Plot phase planes for My and Mz.'''
    root = neoUtils.get_root(blk, unit_num)
    M = neoUtils.get_var(blk).magnitude
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
    use_flags = neoUtils.get_Cbool(blk)
    Mdot = mechanics.get_deriv(M)
    if bin_stretch:
        raise Exception('Not finished with use_flags')
        # MY, logit_y = nl(M[idx, 1], 90)
        # MZ, logit_z = nl(M[idx, 2], 90)
        # MY_dot, logit_ydot = nl(Mdot[idx, 1], 95)
        # MZ_dot, logit_zdot = nl(Mdot[idx, 2], 95)
    else:
        MY = M[:, 1] * 1e-6
        MZ = M[:, 2] * 1e-6
        MY_dot = Mdot[:, 1] * 1e-6
        MZ_dot = Mdot[:, 2] * 1e-6
    My_response, My_edges, Mydot_edges = varTuning.joint_response_hist(
        MY, MY_dot, r, use_flags, [100, 30], min_obs=15)
    Mz_response, Mz_edges, Mzdot_edges = varTuning.joint_response_hist(
        MZ, MZ_dot, r, use_flags, [100, 30], min_obs=15)
    if bin_stretch:
        My_edges = logit_y(My_edges)
        Mz_edges = logit_z(Mz_edges)
        Mydot_edges = logit_ydot(Mydot_edges)
        Mzdot_edges = logit_zdot(Mzdot_edges)
    axy = varTuning.plot_joint_response(My_response, My_edges, Mydot_edges,
                                        contour=False)
    axz = varTuning.plot_joint_response(Mz_response, Mz_edges, Mzdot_edges,
                                        contour=False)
    # Set bounds
    y_mask = np.invert(My_response.mask)
    if not y_mask.all():
        axy.set_ylim(Mydot_edges[np.where(y_mask)[0].min()],
                     Mydot_edges[np.where(y_mask)[0].max()])
        axy.set_xlim(My_edges[np.where(y_mask)[1].min()],
                     My_edges[np.where(y_mask)[1].max()])
    z_mask = np.invert(Mz_response.mask)
    if not z_mask.all():
        axz.set_ylim(Mzdot_edges[np.where(z_mask)[0].min()],
                     Mzdot_edges[np.where(z_mask)[0].max()])
        axz.set_xlim(Mz_edges[np.where(z_mask)[1].min()],
                     Mz_edges[np.where(z_mask)[1].max()])
    # other annotations
    axy.set_title(r'M$_y$ Phase Plane')
    axz.set_title(r'M$_z$ Phase Plane')
    axy.set_xlabel(r'M$_y$ ($\mu$N-m)')
    axy.set_ylabel(r'M$_\dot{y}$ ($\mu$N-m/ms)')
    axz.set_xlabel(r'M$_z$ ($\mu$N-m)')
    axz.set_ylabel(r'M$_\dot{z}$ ($\mu$N-m/ms)')
    axy.grid('off')
    axy.set_facecolor([0.6, 0.6, 0.6])
    axy.axvline(color='k', linewidth=1)
    axy.axhline(color='k', linewidth=1)
    axz.grid('off')
    axz.set_facecolor([0.6, 0.6, 0.6])
    axz.axvline(color='k', linewidth=1)
    axz.axhline(color='k', linewidth=1)
    plt.sca(axy)
    plt.tight_layout()
    if save_tgl:
        if p_save is None:
            raise ValueError('figure save location is required')
        plt.savefig(os.path.join(
            p_save, '{}_My_phaseplane.{}'.format(root, im_ext)),
            dpi=dpi_res)
    plt.sca(axz)
    plt.tight_layout()
    if save_tgl:
        if p_save is None:
            raise ValueError('figure save location is required')
        plt.savefig(os.path.join(
            p_save, '{}_Mz_phaseplane.{}'.format(root, im_ext)),
            dpi=dpi_res)
        plt.close('all')
def calc_world_geom_hist(p_load, p_save, n_bins=100):
    """
    Since calculating the histograms takes so long (mostly loading the data),
    we calculate them once and save the result. This calculates the geometry.

    :param p_load: location where all the neo h5 files live
    :param p_save: location to save the output data files
    :param n_bins: number of bins with which to split the data
    :return None: saves a 'world_geom_hists.npz' file.
    """
    # init
    ID = []
    all_S_bayes = []
    all_TH_bayes = []
    all_PHIE_bayes = []
    all_ZETA_bayes = []
    all_S_edges = []
    all_TH_edges = []
    all_PHIE_edges = []
    all_ZETA_edges = []
    # loop files
    for f in glob.glob(os.path.join(p_load, 'rat*.h5')):
        # load in
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)
        # get contact
        Cbool = neoUtils.get_Cbool(blk)
        use_flags = neoUtils.concatenate_epochs(blk)
        # get vars, centered per contact
        S = neoUtils.get_var(blk, 'S').magnitude
        TH = neoUtils.get_var(blk, 'TH').magnitude
        TH = neoUtils.center_var(TH, use_flags)
        PHIE = neoUtils.get_var(blk, 'PHIE').magnitude
        PHIE = neoUtils.center_var(PHIE, use_flags)
        ZETA = neoUtils.get_var(blk, 'ZETA').magnitude
        ZETA = neoUtils.center_var(ZETA, use_flags)
        # loop units
        for unit in blk.channel_indexes[-1].units:
            # get unit info
            unit_num = int(unit.name[-1])
            r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
            sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
            root = neoUtils.get_root(blk, unit_num)
            ID.append(root)
            # create hists
            S_bayes, S_edges = varTuning.stim_response_hist(
                S.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            TH_bayes, TH_edges = varTuning.stim_response_hist(
                TH.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            PHIE_bayes, PHIE_edges = varTuning.stim_response_hist(
                PHIE.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            ZETA_bayes, ZETA_edges = varTuning.stim_response_hist(
                ZETA.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            # append outputs
            plt.close('all')
            all_S_bayes.append(S_bayes)
            all_TH_bayes.append(TH_bayes)
            all_PHIE_bayes.append(PHIE_bayes)
            all_ZETA_bayes.append(ZETA_bayes)
            all_S_edges.append(S_edges)
            all_TH_edges.append(TH_edges)
            all_PHIE_edges.append(PHIE_edges)
            all_ZETA_edges.append(ZETA_edges)
    np.savez(os.path.join(p_save, 'world_geom_hists.npz'),
             all_S_bayes=all_S_bayes,
             all_TH_bayes=all_TH_bayes,
             all_PHIE_bayes=all_PHIE_bayes,
             all_ZETA_bayes=all_ZETA_bayes,
             all_S_edges=all_S_edges,
             all_TH_edges=all_TH_edges,
             all_PHIE_edges=all_PHIE_edges,
             all_ZETA_edges=all_ZETA_edges,
             ID=ID)
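# A minimal sketch of reading the saved histograms back in. allow_pickle=True
# is needed on newer numpy versions because the stored lists are ragged
# (object-dtype) arrays.
def load_geom_hists(p_save):
    dat = np.load(os.path.join(p_save, 'world_geom_hists.npz'),
                  allow_pickle=True)
    return {k: dat[k] for k in dat.files}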
def calc_all_mech_hists(p_load, p_save, n_bins=100):
    """
    Since calculating the histograms takes so long (mostly loading the data),
    we calculate them once and save the result. This calculates the mechanics.

    :param p_load: location where all the neo h5 files live
    :param p_save: location to save the output data files
    :param n_bins: number of bins with which to split the data
    :return None: saves a 'mech_histograms.npz' file.
    """
    # TODO: This is currently pretty gross; it is really too hardcoded (I wrote it in a car). Do better.
    # TODO: Combine with geometry. Case in point:
    all_F_edges = []
    all_M_edges = []
    all_F_bayes = []
    all_M_bayes = []
    all_MB_edges = []
    all_MD_edges = []
    all_MD_bayes = []
    all_MB_bayes = []
    ID = []
    # loop all neo files
    for f in glob.glob(os.path.join(p_load, 'rat*.h5')):
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)
        Cbool = neoUtils.get_Cbool(blk)
        # loop all units
        for unit in blk.channel_indexes[-1].units:
            unit_num = int(unit.name[-1])
            # grab needed variables
            r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
            sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
            root = neoUtils.get_root(blk, unit_num)
            M = neoUtils.get_var(blk).magnitude
            F = neoUtils.get_var(blk, 'F').magnitude
            MB, MD = neoUtils.get_MB_MD(M)
            # init histograms
            M_bayes = np.empty([n_bins, 3])
            F_bayes = np.empty([n_bins, 3])
            M_edges = np.empty([n_bins + 1, 3])
            F_edges = np.empty([n_bins + 1, 3])
            # calculate tuning curves (separately on each dimension)
            for ii in range(3):
                F_bayes[:, ii], F_edges[:, ii] = varTuning.stim_response_hist(
                    F[:, ii] * 1e6, r, Cbool, nbins=n_bins, min_obs=5)
                M_bayes[:, ii], M_edges[:, ii] = varTuning.stim_response_hist(
                    M[:, ii] * 1e6, r, Cbool, nbins=n_bins, min_obs=5)
            MB_bayes, MB_edges = varTuning.stim_response_hist(
                MB.squeeze() * 1e6, r, Cbool, nbins=n_bins, min_obs=5)
            MD_bayes, MD_edges, _, _ = varTuning.angular_response_hist(
                MD.squeeze(), r, Cbool, nbins=n_bins)
            plt.close('all')
            # append to output lists
            all_F_edges.append(F_edges)
            all_M_edges.append(M_edges)
            all_MB_edges.append(MB_edges)
            all_MD_edges.append(MD_edges)
            all_F_bayes.append(F_bayes)
            all_M_bayes.append(M_bayes)
            all_MB_bayes.append(MB_bayes)
            all_MD_bayes.append(MD_bayes)
            ID.append(root)
    # save
    np.savez(os.path.join(p_save, 'mech_histograms.npz'),
             all_F_bayes=all_F_bayes,
             all_F_edges=all_F_edges,
             all_M_bayes=all_M_bayes,
             all_M_edges=all_M_edges,
             all_MB_bayes=all_MB_bayes,
             all_MB_edges=all_MB_edges,
             all_MD_bayes=all_MD_bayes,
             all_MD_edges=all_MD_edges,
             ID=ID)