def expand_on_bends(df_trl, Fs=500, tPre_ms=100, bendThr=10, minLat_ms=5,
                    maxGap_ms=100):
    """Expand a trial-level dataframe into a bend-level dataframe.

    Takes a dataframe where each row contains single-trial information and
    expands it such that each row contains single-bend information. Bends
    are detected as peaks in the band-pass (5-60 Hz) filtered total tail
    angle trace of each trial.

    Parameters
    ----------
    df_trl: pandas.DataFrame, shape (nTrlsInTotal, nVariables)
        Trial-level dataframe. Must contain the columns 'trlIdx_glob' and
        'tailAngles'; the last row of each 'tailAngles' entry is used as
        the total tail angle trace.
    Fs: int
        Sampling frequency (Hz) when collecting data (images).
    tPre_ms: scalar
        Pre-stimulus period (ms) included at the start of each trial.
    bendThr: scalar
        Relative threshold for peak (bend) detection.
    minLat_ms: scalar
        Minimum latency (ms) after stimulus onset for a bend to be kept.
    maxGap_ms: scalar
        Maximum allowed gap (ms) between successive bends; bends following
        a longer gap are discarded.

    Returns
    -------
    pandas.DataFrame
        `df_trl` merged (on 'trlIdx_glob') with per-bend variables
        (nBends, bendIdx, bendSampleIdxInTrl, bendAmp, bendAmp_abs,
        bendAmp_rel, bendInt_ms, onset_ms). Trials with too few detected
        bends get a single NaN-filled bend row.
    """
    import apCode.SignalProcessingTools as spt

    minPkDist = int(10e-3 * Fs)     # minimum inter-peak distance: 10 ms in samples
    nPre = tPre_ms * 1e-3 * Fs      # pre-stimulus period, in samples
    minLat = minLat_ms * 1e-3 * Fs  # minimum latency, in samples
    maxGap = maxGap_ms * 1e-3 * Fs  # maximum inter-bend gap, in samples
    df_bend = []
    for iTrl in np.unique(df_trl.trlIdx_glob):
        df_now = df_trl.loc[df_trl.trlIdx_glob == iTrl]
        # Last row of 'tailAngles' is taken as the total tail angle trace.
        y = df_now.iloc[0]['tailAngles'][-1]
        y = spt.chebFilt(y, 1 / Fs, (5, 60), btype='bandpass')
        pks = spt.findPeaks(y, thr=bendThr, thrType='rel', pol=0,
                            minPkDist=minPkDist)[0]
        if len(pks) > 3:
            # Drop peaks occurring too soon after stimulus onset, and peaks
            # that follow an inter-peak gap longer than maxGap.
            dpks = np.diff(pks)
            tooSoon = np.where(pks < (nPre + minLat))[0]
            tooSparse = np.where(dpks > maxGap)[0] + 1
            inds_del = np.union1d(tooSoon, tooSparse)
            pks = np.delete(pks, inds_del, axis=0)
        if len(pks) > 3:
            nBends = len(pks)
            bendIdx = np.arange(nBends)
            bendSampleIdxInTrl = pks
            bendAmp = y[pks]
            bendAmp_abs = np.abs(bendAmp)
            # Relative amplitude: change from the previous bend; the first
            # bend is measured from zero.
            bendAmp_rel = np.insert(np.abs(np.diff(bendAmp)), 0,
                                    np.abs(bendAmp[0]))
            bendInt_ms = np.gradient(pks) * (1 / Fs) * 1000
            onset_ms = (pks[0] - nPre + 1) * (1 / Fs) * 1000
        else:
            nBends = 0
            bendIdx, bendAmp, bendAmp_abs, bendAmp_rel, bendInt_ms = \
                [np.nan for _ in range(5)]
            # FIX: was misspelled 'bendsampleIdxInTrl', which left
            # bendSampleIdxInTrl undefined (NameError) or stale from the
            # previous trial's peaks.
            bendSampleIdxInTrl, onset_ms = [np.nan for _ in range(2)]
        dic = dict(trlIdx_glob=iTrl, nBends=nBends, bendIdx=bendIdx,
                   bendSampleIdxInTrl=bendSampleIdxInTrl, bendAmp=bendAmp,
                   bendAmp_abs=bendAmp_abs, bendAmp_rel=bendAmp_rel,
                   bendInt_ms=bendInt_ms, onset_ms=onset_ms)
        # FIX: when nBends == 0 every value in dic is a scalar, and
        # pd.DataFrame(all-scalar dict) raises ValueError unless an
        # explicit index is supplied.
        if nBends == 0:
            df_now = pd.DataFrame(dic, index=[0])
        else:
            df_now = pd.DataFrame(dic)
        df_bend.append(df_now)
    df_bend = pd.concat(df_bend, ignore_index=True)
    return pd.merge(df_trl, df_bend, on='trlIdx_glob')
#%% Save img stack in specified dir # saveDir = r'S:\Avinash\Ablations and behavior\GrpData\Session 20170121\blah' saveDir = os.path.join(inputDir, 'proc') imgName = 'K-means clustering of Alx cells_clstrs unord' + '.tif' tff.imsave(os.path.join(saveDir, imgName), np.transpose(imgStack, [0, 3, 1, 2])) #%% REGRESSION import apCode.AnalyzeEphysData as aed # Chose # 2 after visual inspection (Note: Run Kmeans with k-means++ # init and unordered clusters before runing this) centroid_M = centroids[0, :] dt = data['time'][2] - data['time'][1] centroid_M = spt.zscore(spt.chebFilt(centroid_M, dt, 0.01, btype='high')) thr_Ca = volt.getGlobalThr(centroid_M) pks = spt.findPeaks(centroid_M, thr=thr_Ca, minPkDist=30) plt.figure(figsize=(16, 6)) plt.style.use('dark_background') plt.subplot(131) plt.plot(data['time'], centroid_M) plt.plot(data['time'][pks[0]], centroid_M[pks[0]], 'ro') plt.xlim(100, 500) plt.xlabel('Time (s)') plt.ylabel('dF/F') plt.title('Some stim-elicited responses in chosen centroid' '\n Shown peaks used to average responses') centroid_M_seg = aed.SegmentDataByEvents(centroid_M, pks[0], 20, 50, axis=0) trlLens = np.array([len(trl) for trl in centroid_M_seg])
#%% Save img stack in specified dir # saveDir = r'S:\Avinash\Ablations and behavior\GrpData\Session 20170121\blah' saveDir = os.path.join(inputDir, 'proc') imgName = 'K-means clustering of Alx cells_clstrs unord' + '.tif' tff.imsave(os.path.join(saveDir,imgName),np.transpose(imgStack,[0,3,1,2])) #%% REGRESSION import apCode.AnalyzeEphysData as aed # Chose # 2 after visual inspection (Note: Run Kmeans with k-means++ # init and unordered clusters before runing this) centroid_M = centroids[0,:] dt = data['time'][2] - data['time'][1] centroid_M = spt.zscore(spt.chebFilt(centroid_M,dt,0.01,btype='high')) thr_Ca = volt.getGlobalThr(centroid_M) pks = spt.findPeaks(centroid_M,thr=thr_Ca, minPkDist=30) plt.figure(figsize = (16,6)) plt.style.use('dark_background') plt.subplot(131) plt.plot(data['time'],centroid_M) plt.plot(data['time'][pks[0]],centroid_M[pks[0]],'ro') plt.xlim(100,500) plt.xlabel('Time (s)') plt.ylabel('dF/F') plt.title('Some stim-elicited responses in chosen centroid' '\n Shown peaks used to average responses') centroid_M_seg = aed.SegmentDataByEvents(centroid_M,pks[0],20,50,axis =0) trlLens = np.array([len(trl) for trl in centroid_M_seg])