def create_design_matrix(blk, varlist, window=1, binsize=1, deriv_tgl=False, bases=None):
    '''
    Takes a list of variables and turns it into a matrix.
    Sets the non-contact mechanics to zero, but keeps all the kinematics as NaN.
    You can append the derivative or apply the pillow bases, or both.
    Scales, but does not center the output.
    '''
    X = []
    if type(window) == pq.quantity.Quantity:
        window = int(window)
    if type(binsize) == pq.quantity.Quantity:
        binsize = int(binsize)

    Cbool = neoUtils.get_Cbool(blk, -1)
    use_flags = neoUtils.concatenate_epochs(blk)

    # ================================ #
    # GET THE CONCATENATED DESIGN MATRIX OF REQUESTED VARS
    # ================================ #
    for varname in varlist:
        if varname in ['MB', 'FB']:
            var = neoUtils.get_var(blk, varname[0], keep_neo=False)[0]
            var = neoUtils.get_MB_MD(var)[0]
            var[np.invert(Cbool)] = 0
        elif varname in ['MD', 'FD']:
            var = neoUtils.get_var(blk, varname[0], keep_neo=False)[0]
            var = neoUtils.get_MB_MD(var)[1]
            var[np.invert(Cbool)] = 0
        elif varname in ['ROT', 'ROTD']:
            TH = neoUtils.get_var(blk, 'TH', keep_neo=False)[0]
            PH = neoUtils.get_var(blk, 'PHIE', keep_neo=False)[0]
            TH = neoUtils.center_var(TH, use_flags=use_flags)
            PH = neoUtils.center_var(PH, use_flags=use_flags)
            TH[np.invert(Cbool)] = 0
            PH[np.invert(Cbool)] = 0
            if varname == 'ROT':
                var = np.sqrt(TH ** 2 + PH ** 2)
            else:
                var = np.arctan2(PH, TH)
        else:
            var = neoUtils.get_var(blk, varname, keep_neo=False)[0]
            if varname in ['M', 'F']:
                var[np.invert(Cbool), :] = 0
            if varname in ['TH', 'PHIE']:
                var = neoUtils.center_var(var, use_flags)
                var[np.invert(Cbool), :] = 0
        var = neoUtils.replace_NaNs(var, 'pchip')
        var = neoUtils.replace_NaNs(var, 'interp')
        X.append(var)

    X = np.concatenate(X, axis=1)
    return X
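# A minimal usage sketch of create_design_matrix. The file path is hypothetical; the
# variable names ('M', 'F', 'TH', 'PHIE') and neoUtils.get_blk are taken from the code
# in this repository.
#
# import neoUtils
#
# blk = neoUtils.get_blk('data/rat_example.h5')            # hypothetical path
# X = create_design_matrix(blk, ['M', 'F', 'TH', 'PHIE'])
# print(X.shape)  # [n_samples x n_columns]; mechanics are zeroed outside contact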
def get_X(blk):
    use_flags = neoUtils.concatenate_epochs(blk)
    cbool = neoUtils.get_Cbool(blk)
    M = neoUtils.get_var(blk, 'M').magnitude
    F = neoUtils.get_var(blk, 'F').magnitude
    TH = neoUtils.get_var(blk, 'TH').magnitude
    PH = neoUtils.get_var(blk, 'PHIE').magnitude

    # center angles
    deltaTH = neoUtils.center_var(TH, use_flags)
    deltaPH = neoUtils.center_var(PH, use_flags)
    deltaTH[np.invert(cbool)] = np.nan
    deltaPH[np.invert(cbool)] = np.nan

    X = np.concatenate([M, F, deltaTH, deltaPH], axis=1)
    return(X)
def get_deriv(blk, blk_smooth, varlist, smoothing=range(10)):
    """
    Build a design matrix from the smoothed signals and its first difference.

    :param blk: neo block with the raw data; used for contact epochs and the contact boolean
    :param blk_smooth: neo block containing the corresponding '_smoothed' analog signals
    :param varlist: list of variable names to include (e.g., ['M', 'F', 'TH', 'PHIE'])
    :param smoothing: A list of indices of which smoothing parameter to use. Default is all 10
    :return: Xdot, X
    """
    use_flags = neoUtils.concatenate_epochs(blk)
    Cbool = neoUtils.get_Cbool(blk)
    X = []
    for varname in varlist:
        var = neoUtils.get_var(blk_smooth, varname + '_smoothed', keep_neo=False)[0]
        if varname in ['M', 'F']:
            var[np.invert(Cbool), :, :] = 0
        if varname in ['TH', 'PHIE']:
            for ii in smoothing:
                var[:, :, ii] = neoUtils.center_var(var[:, :, ii], use_flags)
            var[np.invert(Cbool), :, :] = 0
        var = var[:, :, smoothing]
        # var = neoUtils.replace_NaNs(var, 'pchip')
        # var = neoUtils.replace_NaNs(var, 'interp')
        X.append(var)
    X = np.concatenate(X, axis=1)

    # first difference along time, zero-padded so Xdot has the same length as X
    zero_pad = np.zeros([1, X.shape[1], X.shape[2]])
    Xdot = np.diff(np.concatenate([zero_pad, X], axis=0), axis=0)
    Xdot = np.reshape(Xdot, [Xdot.shape[0], Xdot.shape[1] * Xdot.shape[2]])
    X = np.reshape(X, [X.shape[0], X.shape[1] * X.shape[2]])
    return(Xdot, X)
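# A minimal usage sketch of get_deriv, assuming a raw block and a matching block of
# smoothed signals have already been saved (both paths are hypothetical). The choice of
# smoothing indices [0, 4, 9] is arbitrary and only illustrates the `smoothing` argument.
#
# import neoUtils
#
# blk = neoUtils.get_blk('data/rat_example.h5')                  # hypothetical path
# blk_smooth = neoUtils.get_blk('data/rat_example_smoothed.h5')  # hypothetical path
# Xdot, X = get_deriv(blk, blk_smooth, ['M', 'F', 'TH', 'PHIE'], smoothing=[0, 4, 9])
# # X and Xdot are 2-D: [time x (total signal components * len(smoothing))]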
def get_delta_angle(blk):
    '''
    Returns the changes in world angle with respect to the first frame of contact.
    This should give us an estimate of how much the whisker is rotating in the follicle.

    :param blk: a neo block
    :return th_contacts, phie_contacts: [t x n] matrices where t is the number of time samples
        in the longest contact and n is the number of contacts
    '''
    PHIE = neoUtils.get_var(blk, 'PHIE')
    TH = neoUtils.get_var(blk, 'TH')
    use_flags = neoUtils.concatenate_epochs(blk, epoch_idx=-1)
    phie_contacts = neoUtils.get_analog_contact_slices(PHIE, use_flags).squeeze()
    th_contacts = neoUtils.get_analog_contact_slices(TH, use_flags).squeeze()

    d = np.sqrt(phie_contacts ** 2 + th_contacts ** 2)
    use = np.invert(np.all(np.isnan(d), axis=0))  # remove all-NaN slices
    return (th_contacts[:, use], phie_contacts[:, use])
def get_radial_distance_group(blk, plot_tgl=False):
    S = neoUtils.get_var(blk, 'S')
    use_flags = neoUtils.concatenate_epochs(blk, -1)
    S_contacts = neoUtils.get_analog_contact_slices(S, use_flags)
    S_med = np.nanmedian(S_contacts, axis=0)

    mask = np.isfinite(S_med).ravel()
    S_med_masked = S_med[mask]
    if len(S_med_masked) < 10:
        return(-2)

    # fit 2- and 3-component mixtures and keep the one with the lower AIC
    clf3 = mixture.GaussianMixture(n_components=3, n_init=100)
    clf2 = mixture.GaussianMixture(n_components=2, n_init=100)
    clf3.fit(S_med_masked)
    clf2.fit(S_med_masked)
    if clf2.aic(S_med_masked) < clf3.aic(S_med_masked):
        n_clusts = 2
        idx = clf2.predict(S_med_masked)
    else:
        n_clusts = 3
        idx = clf3.predict(S_med_masked)

    # relabel clusters in order of increasing median arclength
    S_clusts = []
    for ii in xrange(n_clusts):
        S_clusts.append(np.nanmedian(S_med_masked[idx == ii]))
    ordering = np.argsort(S_clusts)
    idx = np.array([np.where(x == ordering)[0][0] for x in idx])
    S_clusts.sort()
    if np.any(np.isnan(S_clusts)):
        return(-1)

    idx_out = np.zeros(S_med.shape[0], dtype='int')
    idx_out[mask] = idx

    bin_edges = np.histogram(S_med_masked, 50)[1][:-1]
    if plot_tgl:
        sns.set_style('ticks')
        for ii in xrange(n_clusts):
            if n_clusts == 2:
                cc = plotVG3D.arclength_group_colors()[0::2]
            else:
                cc = plotVG3D.arclength_group_colors()
            sns.distplot(S_med_masked[idx == ii], bins=bin_edges, color=cc[ii], kde=False)
        ax = plt.gca()
        ax.set_ylabel('Number of contacts')
        ax.set_xlabel('Arclength at contact (m)')
        ax.grid('off', axis='x')
        ax.set_title('{}'.format(neoUtils.get_root(blk, 0)))
        sns.despine()
    return(idx_out)
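# A minimal usage sketch of get_radial_distance_group. The file path is hypothetical; the
# sentinel return codes (-2: too few contacts, -1: NaN cluster medians) come from the
# function above.
#
# import numpy as np
# import neoUtils
#
# blk = neoUtils.get_blk('data/rat_example.h5')   # hypothetical path
# group_idx = get_radial_distance_group(blk, plot_tgl=False)
# if isinstance(group_idx, int) and group_idx < 0:
#     print('Clustering failed with code {}'.format(group_idx))
# else:
#     print('Contacts per arclength group: {}'.format(np.bincount(group_idx)))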
def get_PS_given_R(blk, unit_num=0):
    if True:
        raise Exception("This doesn't work yet")
    CP = neoUtils.get_var(blk, 'CP')
    S = float(blk.annotations['s'][2:-1])
    CP /= S
    FR = neoUtils.get_rate_b(blk, unit_num=unit_num, sigma=2 * pq.ms)[1]
    spiked = np.logical_and(np.all(np.isfinite(CP), axis=1), FR)
    idx = np.all(np.isfinite(CP), axis=1)
    PR_S, edges = np.histogramdd(CP.magnitude[spiked, :], bins=50)
    PS, edges = np.histogramdd(CP.magnitude[idx, :], bins=50)
    # NOTE: 'post' is never defined; the function is unfinished (guarded by the raise above)
    return(post)
def MB_curve(blk, unit_num, save_tgl=False, im_ext='svg', dpi_res=300):
    root = neoUtils.get_root(blk, unit_num)
    M = neoUtils.get_var(blk)
    use_flags = neoUtils.get_Cbool(blk)
    MB = mechanics.get_MB_MD(M)[0].magnitude.ravel()
    MB[np.invert(use_flags)] = 0
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)

    MB_bayes, edges = varTuning.stim_response_hist(MB * 1e6, r, use_flags, nbins=100, min_obs=5)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(edges[:-1], MB_bayes, 'o', color='k')
    ax.set_ylabel('Spike Rate (sp/s)')
    ax.set_xlabel('Bending Moment ($\mu$N-m)')
    plt.tight_layout()
    if save_tgl:
        plt.savefig('./figs/{}_MB_tuning.{}'.format(root, im_ext), dpi=dpi_res)
    plt.close('all')
def mymz_space(blk, unit_num, bin_stretch=False, save_tgl=False, p_save=None, im_ext='png', dpi_res=300):
    root = neoUtils.get_root(blk, unit_num)
    use_flags = neoUtils.get_Cbool(blk)
    M = neoUtils.get_var(blk).magnitude
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    idx = np.all(np.isfinite(M), axis=1)
    if bin_stretch:
        MY = np.empty(M.shape[0])
        MZ = np.empty(M.shape[0])
        MY[idx], logit_y = nl(M[idx, 1], 90)
        MZ[idx], logit_z = nl(M[idx, 2], 90)
    else:
        MY = M[:, 1] * 1e-6
        MZ = M[:, 2] * 1e-6

    response, var1_edges, var2_edges = varTuning.joint_response_hist(MY, MZ, sp, use_flags, bins=100, min_obs=15)
    if bin_stretch:
        var1_edges = logit_y(var1_edges)
        var2_edges = logit_z(var2_edges)
    else:
        pass

    ax = varTuning.plot_joint_response(response, var1_edges, var2_edges, contour=False)
    ax.axvline(color='k', linewidth=1)
    ax.axhline(color='k', linewidth=1)
    ax.patch.set_color([0.6, 0.6, 0.6])

    mask = response.mask.__invert__()
    if not mask.all():
        ax.set_ylim(var2_edges[np.where(mask)[0].min()], var2_edges[np.where(mask)[0].max()])
        ax.set_xlim(var1_edges[np.where(mask)[1].min()], var1_edges[np.where(mask)[1].max()])

    ax.set_xlabel('M$_y$ ($\mu$N-m)')
    ax.set_ylabel('M$_z$ ($\mu$N-m)')
    plt.draw()
    plt.tight_layout()

    if save_tgl:
        if p_save is None:
            raise ValueError("figure save location is required")
        else:
            plt.savefig(os.path.join(p_save, '{}_mymz.{}'.format(root, im_ext)), dpi=dpi_res)
    plt.close('all')
def FX_plots(blk, unit_num, save_tgl=False, im_ext='svg', dpi_res=300):
    root = neoUtils.get_root(blk, unit_num)
    F = neoUtils.get_var(blk, 'F')
    Fx = F.magnitude[:, 0]
    use_flags = neoUtils.get_Cbool(blk)
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)

    Fx[np.invert(use_flags)] = 0
    Fx_bayes, edges = varTuning.stim_response_hist(Fx * 1e6, r, use_flags, nbins=50, min_obs=5)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(edges[:-1], Fx_bayes * 1000, 'o', color='k')
    ax.set_ylabel('Spike Rate (sp/s)')
    ax.set_xlabel('Axial Force ($\mu$N)')
    plt.tight_layout()
    if save_tgl:
        plt.savefig('./figs/{}_Fx_tuning.{}'.format(root, im_ext), dpi=dpi_res)
    plt.close('all')
def ent_analyses(blk, unit_num=0, X_disc=128, Y_disc=64):
    CP = neoUtils.get_var(blk, 'CP')
    S = float(blk.annotations['s'][2:-1])
    CP /= S
    CP = CP.magnitude

    # quantise the contact point into X_disc bins per dimension
    idx = np.all(np.isfinite(CP), axis=1)
    s = np.empty_like(CP)
    s[:] = np.nan
    s[idx, :] = pye.quantise(CP[idx, :], X_disc, uniform='bins')[0]

    # quantise the firing rate into Y_disc bins
    FR = neoUtils.get_rate_b(blk, unit_num=unit_num, sigma=2 * pq.ms)[0]
    FR = pye.quantise(FR, Y_disc, uniform='bins')[0]

    idx = np.all(np.isfinite(s), axis=1)
    X = s.astype('int64').T[:, idx]
    Y = FR[np.newaxis, idx]
    # alphabet sizes match the quantisation above
    DS = pye.DiscreteSystem(X, (X.shape[0], X_disc), Y, (1, Y_disc))
    DS.calculate_entropies()
    # TODO: I have created a discrete FR and Stimulus, now I need to perform the actual entropy calcs
    if True:
        raise Exception('This is not done')
def create_threshold_DF(blk, unit_num=0, time_win=20, max_spikes=3):
    # If we use a constant time window then we are not looking at the magnitude, but the derivative,
    # which could be what we want...
    use_flags = neoUtils.concatenate_epochs(blk)
    id = neoUtils.get_root(blk, unit_num)
    if len(use_flags) < 10:
        print('{} has too few contacts'.format(id))
        return -1

    onset, offset = neoUtils.get_contact_apex_idx(blk, mode='time_win', time_win=time_win)
    all_var_mag = np.empty([len(onset), 0])
    for varname in ['M', 'F', 'TH', 'PHIE']:
        var = neoUtils.get_var(blk, varname)
        if varname in ['TH', 'PHIE']:
            var = neoUtils.center_var(var, use_flags)
        var_sliced = neoUtils.get_analog_contact_slices(var, use_flags)
        var_onset = worldGeometry.get_onset(var_sliced, onset, to_array=False)
        var_mag = np.array([x[-1] if len(x) > 0 else np.zeros(var_sliced.shape[2])
                            for x in var_onset])
        all_var_mag = np.concatenate([all_var_mag, var_mag], axis=1)

    c_idx = np.empty(var_sliced.shape[1], dtype='f8')
    c_idx[:] = np.nan
    for n_spikes in range(max_spikes):
        temp_idx = spikeAnalysis.get_onset_contacts(blk, onset, num_spikes=n_spikes)
        c_idx[temp_idx] = n_spikes

    X = np.concatenate([all_var_mag, c_idx[:, np.newaxis]], axis=1)
    df = pd.DataFrame(X)
    df = df.rename(columns={0: 'Mx', 1: 'My', 2: 'Mz', 3: 'Fx', 4: 'Fy', 5: 'Fz',
                            6: 'TH', 7: 'PHI', 8: 'n_spikes'})
    dir_idx, med_dir = worldGeometry.get_contact_direction(blk, False)
    df['dir_idx'] = dir_idx
    df['med_dir'] = df.dir_idx.map({x: med_dir[x] for x in range(len(med_dir))})
    df['id'] = [id for x in range(df.shape[0])]
    df['time_win'] = [time_win for x in range(df.shape[0])]
    return(df)
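# A minimal sketch of batch-building the threshold DataFrame across recordings. The
# directory is hypothetical; the 'rat*.h5' pattern and neoUtils.get_blk are reused from
# the batch functions elsewhere in this file, and the -1 check mirrors the early return above.
#
# import glob
# import os
# import pandas as pd
# import neoUtils
#
# p_load = '/path/to/neo_files'        # hypothetical path
# dfs = []
# for f in glob.glob(os.path.join(p_load, 'rat*.h5')):
#     blk = neoUtils.get_blk(f)
#     df = create_threshold_DF(blk, unit_num=0, time_win=20)
#     if not isinstance(df, int):      # skip the -1 "too few contacts" return
#         dfs.append(df)
# threshold_df = pd.concat(dfs, ignore_index=True)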
import glob
import os

import numpy as np
import pandas as pd

import neoUtils

recording_length = []
frame_length = []
root = []
p_save = r'C:\Users\guru\Box Sync\___hartmann_lab\papers\VG3D\summary_data_used'
for f in glob.glob(os.path.join(os.environ['BOX_PATH'],
                                r'__VG3D\_deflection_trials\_NEO\*.h5')):
    print('Working on {}'.format(os.path.basename(f)))
    blk = neoUtils.get_blk(f)
    M = neoUtils.get_var(blk)
    t = M.t_stop.magnitude
    recording_length.append(M.t_stop)
    root.append(neoUtils.get_root(blk, 0))

    # 2017 recordings were filmed at 500 fps, earlier ones at 300 fps
    year = neoUtils.get_root(blk, 0)[:4]
    if year == '2017':
        frames = int(np.round((t * 1000) / (1000. / 500.)))
    else:
        frames = int(np.round((t * 1000) / (1000. / 300.)))
    frame_length.append(frames)

df = pd.DataFrame()
df['id'] = root
df['Time (s)'] = recording_length
df['Number of Frames'] = frame_length
df.to_csv(os.path.join(p_save, 'recording_lengths.csv'), index=False)
def calc_world_geom_hist(p_load, p_save, n_bins=100):
    """
    Since calculation takes so long on getting the histograms (mostly loading of data),
    we want to calculate them once and save the data. This calculates the geometry.

    :param p_load: Location where all the neo h5 files live
    :param p_save: Location to save the output data files
    :param n_bins: Number of bins with which to split the data

    :return None: Saves a 'world_geom_hists.npz' file.
    """
    # init
    ID = []
    all_S_bayes = []
    all_TH_bayes = []
    all_PHIE_bayes = []
    all_ZETA_bayes = []
    all_S_edges = []
    all_TH_edges = []
    all_PHIE_edges = []
    all_ZETA_edges = []

    # loop files
    for f in glob.glob(os.path.join(p_load, 'rat*.h5')):
        # load in
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)

        # get contact
        Cbool = neoUtils.get_Cbool(blk)
        use_flags = neoUtils.concatenate_epochs(blk)

        # get vars (center the angular variables on each contact)
        S = neoUtils.get_var(blk, 'S').magnitude
        TH = neoUtils.get_var(blk, 'TH').magnitude
        TH = neoUtils.center_var(TH, use_flags)
        PHIE = neoUtils.get_var(blk, 'PHIE').magnitude
        PHIE = neoUtils.center_var(PHIE, use_flags)
        ZETA = neoUtils.get_var(blk, 'ZETA').magnitude
        ZETA = neoUtils.center_var(ZETA, use_flags)

        # loop units
        for unit in blk.channel_indexes[-1].units:
            # get unit info
            unit_num = int(unit.name[-1])
            r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
            sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
            root = neoUtils.get_root(blk, unit_num)
            ID.append(root)

            # Create hists
            S_bayes, S_edges = varTuning.stim_response_hist(S.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            TH_bayes, TH_edges = varTuning.stim_response_hist(TH.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            PHIE_bayes, PHIE_edges = varTuning.stim_response_hist(PHIE.ravel(), r, Cbool, nbins=n_bins, min_obs=5)
            ZETA_bayes, ZETA_edges = varTuning.stim_response_hist(ZETA.ravel(), r, Cbool, nbins=n_bins, min_obs=5)

            # append outputs
            plt.close('all')
            all_S_bayes.append(S_bayes)
            all_TH_bayes.append(TH_bayes)
            all_PHIE_bayes.append(PHIE_bayes)
            all_ZETA_bayes.append(ZETA_bayes)

            all_S_edges.append(S_edges)
            all_TH_edges.append(TH_edges)
            all_PHIE_edges.append(PHIE_edges)
            all_ZETA_edges.append(ZETA_edges)

    np.savez(os.path.join(p_save, 'world_geom_hists.npz'),
             all_S_bayes=all_S_bayes,
             all_TH_bayes=all_TH_bayes,
             all_PHIE_bayes=all_PHIE_bayes,
             all_ZETA_bayes=all_ZETA_bayes,
             all_S_edges=all_S_edges,
             all_TH_edges=all_TH_edges,
             all_PHIE_edges=all_PHIE_edges,
             all_ZETA_edges=all_ZETA_edges,
             ID=ID
             )
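# A minimal sketch of running the batch geometry histograms and reading the saved arrays
# back. Both directories are hypothetical; the .npz filename and keys come from the
# np.savez call in calc_world_geom_hist above.
#
# import os
# import numpy as np
#
# p_load = '/path/to/neo_files'     # hypothetical path
# p_save = '/path/to/output'        # hypothetical path
# calc_world_geom_hist(p_load, p_save, n_bins=100)
# dat = np.load(os.path.join(p_save, 'world_geom_hists.npz'), allow_pickle=True)
# print(dat['ID'])                  # one entry per cell
# print(len(dat['all_S_bayes']))    # one arclength tuning histogram per cell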
def calc_all_mech_hists(p_load, p_save, n_bins=100):
    """
    Since calculation takes so long on getting the histograms (mostly loading of data),
    we want to calculate them once and save the data. This calculates the mechanics.

    :param p_load: Location where all the neo h5 files live
    :param p_save: Location to save the output data files
    :param n_bins: Number of bins with which to split the data

    :return None: Saves a 'mech_histograms.npz' file.
    """
    # TODO: This is currently pretty gross, it is really too hardcoded (I wrote it in a car). Do better.
    # TODO: Combine with geometry
    # Case in point:
    all_F_edges = []
    all_M_edges = []
    all_F_bayes = []
    all_M_bayes = []
    all_MB_edges = []
    all_MD_edges = []
    all_MD_bayes = []
    all_MB_bayes = []
    ID = []

    # Loop all neo files
    for f in glob.glob(os.path.join(p_load, 'rat*.h5')):
        print(os.path.basename(f))
        blk = neoUtils.get_blk(f)
        Cbool = neoUtils.get_Cbool(blk)

        # Loop all units
        for unit in blk.channel_indexes[-1].units:
            unit_num = int(unit.name[-1])

            # grab needed variables
            r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
            sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
            root = neoUtils.get_root(blk, unit_num)
            M = neoUtils.get_var(blk).magnitude
            F = neoUtils.get_var(blk, 'F').magnitude
            MB, MD = neoUtils.get_MB_MD(M)

            # init histograms
            M_bayes = np.empty([n_bins, 3])
            F_bayes = np.empty([n_bins, 3])
            M_edges = np.empty([n_bins + 1, 3])
            F_edges = np.empty([n_bins + 1, 3])

            # calculate tuning curves (separately on each dimension)
            for ii in range(3):
                F_bayes[:, ii], F_edges[:, ii] = varTuning.stim_response_hist(
                    F[:, ii] * 1e6, r, Cbool, nbins=n_bins, min_obs=5)
                M_bayes[:, ii], M_edges[:, ii] = varTuning.stim_response_hist(
                    M[:, ii] * 1e6, r, Cbool, nbins=n_bins, min_obs=5)
            MB_bayes, MB_edges = varTuning.stim_response_hist(
                MB.squeeze() * 1e6, r, Cbool, nbins=n_bins, min_obs=5)
            MD_bayes, MD_edges, _, _ = varTuning.angular_response_hist(
                MD.squeeze(), r, Cbool, nbins=n_bins)
            plt.close('all')

            # append to output lists
            all_F_edges.append(F_edges)
            all_M_edges.append(M_edges)
            all_MB_edges.append(MB_edges)
            all_MD_edges.append(MD_edges)

            all_F_bayes.append(F_bayes)
            all_M_bayes.append(M_bayes)
            all_MB_bayes.append(MB_bayes)
            all_MD_bayes.append(MD_bayes)
            ID.append(root)

    # save
    np.savez(os.path.join(p_save, 'mech_histograms.npz'),
             all_F_bayes=all_F_bayes,
             all_F_edges=all_F_edges,
             all_M_bayes=all_M_bayes,
             all_M_edges=all_M_edges,
             all_MB_bayes=all_MB_bayes,
             all_MB_edges=all_MB_edges,
             all_MD_bayes=all_MD_bayes,
             all_MD_edges=all_MD_edges,
             ID=ID
             )
def shadeVector(cc, color='k', ax=None):
    if ax is None:
        ax = plt.gca()
    ylim = ax.get_ylim()
    for start, dur in zip(cc.times.magnitude, cc.durations.magnitude):
        ax.fill([start, start, start + dur, start + dur],
                [ylim[0], ylim[1], ylim[1], ylim[0]],
                color, alpha=0.1)

# figsize, blk, cbool, sp, starts, stops, and cc are defined earlier in this script
wd = figsize[0]
ht = wd / 2
M = neoUtils.get_var(blk, 'M')
F = neoUtils.get_var(blk, 'F')
TH = neoUtils.get_var(blk, 'TH')
PH = neoUtils.get_var(blk, 'PHIE')
TH = neoUtils.center_var(TH, cc)
PH = neoUtils.center_var(PH, cc)
TH[np.invert(cbool)] = np.nan
PH[np.invert(cbool)] = np.nan
for start, stop in zip(starts, stops):
    fig, ax = plt.subplots(4, 1, figsize=(wd, ht))
    sp_slice = sp.time_slice(start, stop)
    ax[0].plot(M.time_slice(start, stop))
    ax[1].plot(F.time_slice(start, stop))
    ax[2].plot(TH.time_slice(start, stop))
    ax[3].plot(PH.time_slice(start, stop))
    for _ax in ax:
        shadeVector(cc, ax=_ax)  # likely intent: shade contact epochs on each subplot (the original loop body is truncated here)
def phase_plots(blk, unit_num, save_tgl=False, bin_stretch=False, p_save=None, im_ext='png', dpi_res=300):
    ''' Plot phase planes for My and Mz'''
    root = neoUtils.get_root(blk, unit_num)
    M = neoUtils.get_var(blk).magnitude
    sp = neoUtils.concatenate_sp(blk)['cell_{}'.format(unit_num)]
    r, b = neoUtils.get_rate_b(blk, unit_num, sigma=5 * pq.ms)
    use_flags = neoUtils.get_Cbool(blk)
    Mdot = mechanics.get_deriv(M)

    if bin_stretch:
        raise Exception('Not finished with use_flags')
        # MY, logit_y = nl(M[idx, 1], 90)
        # MZ, logit_z = nl(M[idx, 2], 90)
        # MY_dot, logit_ydot = nl(Mdot[idx, 1], 95)
        # MZ_dot, logit_zdot = nl(Mdot[idx, 2], 95)
    else:
        MY = M[:, 1] * 1e-6
        MZ = M[:, 2] * 1e-6
        MY_dot = Mdot[:, 1] * 1e-6
        MZ_dot = Mdot[:, 2] * 1e-6

    My_response, My_edges, Mydot_edges = varTuning.joint_response_hist(
        MY, MY_dot, r, use_flags, [100, 30], min_obs=15)
    Mz_response, Mz_edges, Mzdot_edges = varTuning.joint_response_hist(
        MZ, MZ_dot, r, use_flags, [100, 30], min_obs=15)
    if bin_stretch:
        My_edges = logit_y(My_edges)
        Mz_edges = logit_z(Mz_edges)
        Mydot_edges = logit_ydot(Mydot_edges)
        Mzdot_edges = logit_zdot(Mzdot_edges)
    else:
        pass

    axy = varTuning.plot_joint_response(My_response, My_edges, Mydot_edges, contour=False)
    axz = varTuning.plot_joint_response(Mz_response, Mz_edges, Mzdot_edges, contour=False)

    # Set bounds
    y_mask = My_response.mask.__invert__()
    if not y_mask.all():
        axy.set_ylim(Mydot_edges[np.where(y_mask)[0].min()], Mydot_edges[np.where(y_mask)[0].max()])
        axy.set_xlim(My_edges[np.where(y_mask)[1].min()], My_edges[np.where(y_mask)[1].max()])

    z_mask = Mz_response.mask.__invert__()
    if not z_mask.all():
        axz.set_ylim(Mzdot_edges[np.where(z_mask)[0].min()], Mzdot_edges[np.where(z_mask)[0].max()])
        axz.set_xlim(Mz_edges[np.where(z_mask)[1].min()], Mz_edges[np.where(z_mask)[1].max()])

    # other annotations
    axy.set_title('M$_y$ Phase Plane')
    axz.set_title('M$_z$ Phase Plane')
    axy.set_xlabel('M$_y$ ($\mu$N-m)')
    axy.set_ylabel('M$_\dot{y}$ ($\mu$N-m/ms)')
    axz.set_xlabel('M$_z$ ($\mu$N-m)')
    axz.set_ylabel('M$_\dot{z}$ ($\mu$N-m/ms)')

    axy.grid('off')
    axy.set_facecolor([0.6, 0.6, 0.6])
    axy.axvline(color='k', linewidth=1)
    axy.axhline(color='k', linewidth=1)

    axz.grid('off')
    axz.set_facecolor([0.6, 0.6, 0.6])
    axz.axvline(color='k', linewidth=1)
    axz.axhline(color='k', linewidth=1)

    plt.sca(axy)
    plt.tight_layout()
    if save_tgl:
        if p_save is None:
            raise ValueError("figure save location is required")
        else:
            plt.savefig(os.path.join(p_save, '{}_My_phaseplane.{}'.format(root, im_ext)), dpi=dpi_res)

    plt.sca(axz)
    plt.tight_layout()
    if save_tgl:
        if p_save is None:
            raise ValueError("figure save location is required")
        else:
            plt.savefig(os.path.join(p_save, '{}_Mz_phaseplane.{}'.format(root, im_ext)), dpi=dpi_res)
    plt.close('all')
def plot_smooth_hists(blk, blk_smooth, unit_num=0, p_save=None, nbins=75):
    DPI_RES = 600
    id = neoUtils.get_root(blk, unit_num)
    fig_name = os.path.join(p_save, '{}_derivative_smoothing_compare.png'.format(id))
    if os.path.isfile(fig_name):
        print('{} found, skipping...'.format(fig_name))
        return(None)

    smoothing_windows = range(5, 101, 10)
    use_flags = neoUtils.concatenate_epochs(blk)
    cbool = neoUtils.get_Cbool(blk)
    r, b = neoUtils.get_rate_b(blk, unit_num, 2 * pq.ms)

    # catch empty smoothed data
    if len(blk_smooth.segments) == 0 or len(blk_smooth.segments[0].analogsignals) == 0:
        print('Smoothed data not found in {}'.format(id))
        return(-1)

    # get vars
    M = neoUtils.get_var(blk_smooth, 'M_smoothed').magnitude
    M[np.invert(cbool), :] = np.nan
    Mdot = neoUtils.get_deriv(M)

    F = neoUtils.get_var(blk_smooth, 'F_smoothed').magnitude
    F[np.invert(cbool), :] = np.nan
    Fdot = neoUtils.get_deriv(F)

    PHI = neoUtils.get_var(blk_smooth, 'PHIE_smoothed').magnitude
    PHI = neoUtils.center_var(PHI.squeeze(), use_flags)
    PHI[np.invert(cbool), :] = np.nan
    PHIdot = neoUtils.get_deriv(PHI)

    TH = neoUtils.get_var(blk_smooth, 'TH_smoothed').magnitude
    TH = neoUtils.center_var(TH.squeeze(), use_flags)
    TH[np.invert(cbool), :] = np.nan
    THdot = neoUtils.get_deriv(TH)

    # ROT = np.sqrt(np.add(np.power(PHI,2),np.power(TH,2)))
    # ROTdot = neoUtils.get_deriv(ROT)

    # calculate histograms (first pass sets the bin ranges, second pass recomputes on those bins)
    R_Mdot, bins_Mdot, edgesx_Mdot, edgesy_Mdot = mult_join_plots(
        Mdot[:, 1, :], Mdot[:, 2, :], r, cbool, bins=nbins)
    newbins = [np.linspace(bins_Mdot[0][edgesx_Mdot][0], bins_Mdot[0][edgesx_Mdot][1], nbins),
               np.linspace(bins_Mdot[1][edgesy_Mdot][0], bins_Mdot[1][edgesy_Mdot][1], nbins)]
    R_Mdot, bins_Mdot, edgesx_Mdot, edgesy_Mdot = mult_join_plots(
        Mdot[:, 1, :], Mdot[:, 2, :], r, cbool, bins=newbins)

    R_Fdot, bins_Fdot, edgesx_Fdot, edgesy_Fdot = mult_join_plots(
        Fdot[:, 1, :], Fdot[:, 2, :], r, cbool, bins=nbins)
    newbins = [np.linspace(bins_Fdot[0][edgesx_Fdot][0], bins_Fdot[0][edgesx_Fdot][1], nbins),
               np.linspace(bins_Fdot[1][edgesy_Fdot][0], bins_Fdot[1][edgesy_Fdot][1], nbins)]
    R_Fdot, bins_Fdot, edgesx_Fdot, edgesy_Fdot = mult_join_plots(
        Fdot[:, 1, :], Fdot[:, 2, :], r, cbool, bins=newbins)

    R_ROTdot, bins_ROTdot, edgesx_ROTdot, edgesy_ROTdot = mult_join_plots(
        THdot, PHIdot, r, cbool, bins=nbins)
    newbins = [np.linspace(bins_ROTdot[0][edgesx_ROTdot][0], bins_ROTdot[0][edgesx_ROTdot][1], nbins),
               np.linspace(bins_ROTdot[1][edgesy_ROTdot][0], bins_ROTdot[1][edgesy_ROTdot][1], nbins)]
    R_ROTdot, bins_ROTdot, edgesx_ROTdot, edgesy_ROTdot = mult_join_plots(
        THdot, PHIdot, r, cbool, bins=newbins)

    # common color scale across all panels
    FR = []
    FR.append(np.nanmax([x.max() for x in R_Mdot.values()]))
    FR.append(np.nanmax([x.max() for x in R_Fdot.values()]))
    FR.append(np.nanmax([x.max() for x in R_ROTdot.values()]))
    colormax = np.nanmax(FR)

    # Plots
    f = plt.figure()
    figManager = plt.get_current_fig_manager()
    figManager.window.showMaximized()
    # hardcoded for 5 smoothing steps
    for loc, ii in enumerate(range(0, 10, 2)):
        ax = f.add_subplot(3, 5, loc + 1)
        ax.pcolormesh(bins_Mdot[0], bins_Mdot[1], R_Mdot[ii],
                      cmap='OrRd', edgecolors='None', vmin=0, vmax=colormax)
        ax.set_xlim(bins_Mdot[0][edgesx_Mdot])
        ax.set_ylim(bins_Mdot[1][edgesy_Mdot])
        ax.set_title('Smoothing window = {}ms'.format(smoothing_windows[ii]))
        ax.axvline(color='k', linewidth=1)
        ax.axhline(color='k', linewidth=1)
        if ii == 0:
            ax.set_ylabel('$\\dot{M_y}$ vs $\\dot{M_z}$', rotation=0, labelpad=20)

    for loc, ii in enumerate(range(0, 10, 2)):
        ax = f.add_subplot(3, 5, loc + 1 + 5)
        ax.pcolormesh(bins_Fdot[0], bins_Fdot[1], R_Fdot[ii],
                      cmap='OrRd', edgecolors='None', vmin=0, vmax=colormax)
        ax.set_xlim(bins_Fdot[0][edgesx_Fdot])
        ax.set_ylim(bins_Fdot[1][edgesy_Fdot])
        ax.axvline(color='k', linewidth=1)
        ax.axhline(color='k', linewidth=1)
        if ii == 0:
            ax.set_ylabel('$\\dot{F_y}$ vs $\\dot{F_z}$', rotation=0, labelpad=20)

    for loc, ii in enumerate(range(0, 10, 2)):
        ax = f.add_subplot(3, 5, loc + 1 + 10)
        h = ax.pcolormesh(bins_ROTdot[0], bins_ROTdot[1], R_ROTdot[ii],
                          cmap='OrRd', edgecolors='None', vmin=0, vmax=colormax)
        ax.set_xlim(bins_ROTdot[0][edgesx_ROTdot])
        ax.set_ylim(bins_ROTdot[1][edgesy_ROTdot])
        ax.axvline(color='k', linewidth=1)
        ax.axhline(color='k', linewidth=1)
        if ii == 0:
            ax.set_ylabel('$\\dot{\\theta}$ vs $\\dot{\\phi}$', rotation=0, labelpad=20)

    plt.suptitle('{}'.format(id))
    plt.colorbar(h)
    plt.pause(0.1)
    if p_save is not None:
        plt.savefig(fig_name, dpi=DPI_RES)
    plt.close('all')
    return(None)
def onset_tuning(blk, unit_num=0, use_zeros=True):
    '''
    Calculate the onset velocity both in terms of CP and in terms of rotation.
    Calculate the relationship between the onset firing rate and the different velocities.

    :param blk:
    :param unit_num:
    :param use_zeros:
    :return V_cp_fit, V_rot_fit:
    '''
    use_flags = neoUtils.concatenate_epochs(blk)
    trains = spikeAnalysis.get_contact_sliced_trains(blk, unit_num)[-1]
    apex = neoUtils.get_contact_apex_idx(blk) * pq.ms
    apex_idx = apex.magnitude.astype('int')
    id = neoUtils.get_root(blk, unit_num)

    # get MB and FB at apex
    M = neoUtils.get_var(blk)
    MB = neoUtils.get_MB_MD(M)[0]
    MB_contacts = neoUtils.get_analog_contact_slices(MB, use_flags)
    MB_apex = neoUtils.get_value_at_idx(MB_contacts, apex_idx).squeeze()
    MB_dot = MB_apex / apex

    F = neoUtils.get_var(blk, 'F')
    FB = neoUtils.get_MB_MD(F)[0]
    FB_contacts = neoUtils.get_analog_contact_slices(FB, use_flags)
    FB_apex = neoUtils.get_value_at_idx(FB_contacts, apex_idx).squeeze()
    FB_dot = FB_apex / apex

    # Get onset FR
    onset_counts = np.array([
        len(train.time_slice(train.t_start, train.t_start + dur))
        for train, dur in zip(trains, apex)
    ])
    onset_FR = np.divide(onset_counts, apex)
    onset_FR.units = 1 / pq.s

    # get V_onset_rot
    V_rot, _, D = worldGeometry.get_onset_velocity(blk)
    dir_idx, dir_angle = worldGeometry.get_contact_direction(blk, False)
    if isinstance(dir_idx, int) and dir_idx == -1:
        return (-1, -1, -1)

    df = pd.DataFrame()
    df['id'] = [id for x in xrange(MB_dot.shape[0])]
    df['MB'] = MB_apex
    df['MB_dot'] = MB_dot
    df['FB_dot'] = FB_dot
    df['FB'] = FB_apex
    df['rot'] = D
    df['rot_dot'] = V_rot
    df['dir_idx'] = dir_idx
    df['FR'] = onset_FR
    df['dir_angle'] = [dir_angle[x] for x in dir_idx]
    df = df.replace(np.inf, np.nan)
    df = df.dropna()

    # FIT:
    fits_all = pd.DataFrame(columns=['id', 'var', 'rvalue', 'pvalue', 'slope', 'intercept'])
    fits_direction = pd.DataFrame()
    idx = 0
    idx2 = 0
    for var in ['MB', 'MB_dot', 'FB', 'FB_dot', 'rot', 'rot_dot']:
        # regression across all contacts
        fit = stats.linregress(df[var], df['FR'])._asdict()
        fits_all.loc[idx, 'id'] = id
        fits_all.loc[idx, 'var'] = var
        for k, v in fit.iteritems():
            fits_all.loc[idx, k] = v
        idx += 1

        # regression within each deflection direction
        for direction in xrange(np.max(dir_idx) + 1):
            temp_idx = df['dir_idx'] == direction
            if not np.any(temp_idx):
                continue
            fit = stats.linregress(df[var][temp_idx], df['FR'][temp_idx])._asdict()
            fits_direction.loc[idx2, 'id'] = id
            fits_direction.loc[idx2, 'var'] = var
            fits_direction.loc[idx2, 'dir_idx'] = direction
            fits_direction.loc[idx2, 'med_dir'] = dir_angle[direction]
            for k, v in fit.iteritems():
                fits_direction.loc[idx2, k] = v
            idx2 += 1
    return (fits_all, fits_direction, df)
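# A minimal usage sketch of onset_tuning on a single recording (hypothetical path). The
# sentinel check mirrors the (-1, -1, -1) early return when contact direction cannot be
# computed; the selected columns come from the fits_all DataFrame defined above.
#
# import neoUtils
#
# blk = neoUtils.get_blk('data/rat_example.h5')   # hypothetical path
# fits_all, fits_direction, df = onset_tuning(blk, unit_num=0)
# if isinstance(fits_all, int):
#     print('Could not compute contact directions for this recording')
# else:
#     print(fits_all[['var', 'rvalue', 'pvalue']])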
import sys

import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns

import neoUtils

sns.set()
sns.set_style('ticks')

blk = neoUtils.get_blk(sys.argv[1])
M = neoUtils.get_var(blk).magnitude
sp = neoUtils.concatenate_sp(blk)
cc = neoUtils.concatenate_epochs(blk, -1)
Cbool = neoUtils.get_Cbool(blk)
c_idx = np.where(Cbool)[0]
# M[np.invert(Cbool),:] = 0
ymax = np.nanmax(M) / 4
ymin = np.nanmin(M) / 4


def shadeVector(cc, color='k'):
    ax = plt.gca()
    ylim = ax.get_ylim()
    for start, dur in zip(cc.times.magnitude, cc.durations.magnitude):
        ax.fill([start, start, start + dur, start + dur],
                [ylim[0], ylim[1], ylim[1], ylim[0]],
                color, alpha=0.1)


for ii in xrange(len(sp)):