def test_pulsefish():
    samplerate = 44100.0
    data = pulsefish_eods('Biphasic', 200.0, samplerate, 5.0, noise_std=0.02)
    pi, _ = detect_peaks(data, 1.0)
    mean_eod, eod_times = ea.eod_waveform(data, samplerate, pi/samplerate)
    mean_eod, props, peaks, power = ea.analyze_pulse(mean_eod, eod_times)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ea.plot_pulse_spectrum(ax, power, props)
    fig.savefig('pulse.png')
    assert_true(os.path.exists('pulse.png'), 'plotting failed')
    os.remove('pulse.png')
def extract_eod_times(data, peakwidth):
    thresh = np.mean(np.abs(data)) * 2
    pk, tr = ed.detect_peaks(data, thresh)
    if len(pk) == 0:
        return [], [], [], []
    else:
        peaks = pth.makeeventlist(pk, tr, data, peakwidth)
        peakindices, _, _ = pth.discardnearbyevents(peaks[0], peaks[2], peakwidth)
        idx = peakindices.astype('int')
        return (peaks[0][idx].astype('int'), peaks[-1][idx].astype('int'),
                peaks[2][idx], peaks[3][idx])
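# A minimal, hedged usage sketch for extract_eod_times (not part of the original
# code). It assumes a 1-D voltage trace is already in memory; the noise array
# below is only a stand-in for a real EOD recording, and the ~1 ms peak width is
# an example value, not a recommendation. The variable names on the left are
# assumed from how the event-list rows are indexed above.
def _example_extract_eod_times():
    samplerate = 44100.0
    data = np.random.randn(int(samplerate))   # placeholder for a real recording
    peakwidth = int(0.001 * samplerate)       # assumed peak width in samples
    x_peaks, x_troughs, heights, widths = extract_eod_times(data, peakwidth)
    print(len(x_peaks), 'EOD candidates detected')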
def test_detect_peaks():
    # generate data:
    time = np.arange(0.0, 10.0, 0.01)
    data = np.zeros(time.shape)
    pt_indices = np.random.randint(5, len(data) - 10, size=40)
    pt_indices.sort()
    while np.any(np.diff(pt_indices).min() < 5):
        pt_indices = np.random.randint(5, len(data) - 10, size=40)
        pt_indices.sort()
    peak_indices = pt_indices[0::2]
    trough_indices = pt_indices[1::2]
    n = pt_indices[0]
    data[0:n] = 0.1 + 0.9 * np.arange(0.0, n) / n
    up = False
    for i in range(len(pt_indices) - 1):
        n = pt_indices[i + 1] - pt_indices[i]
        if up:
            data[pt_indices[i]:pt_indices[i + 1]] = np.arange(0.0, n) / n
        else:
            data[pt_indices[i]:pt_indices[i + 1]] = 1.0 - np.arange(0.0, n) / n
        up = not up
    n = len(data) - pt_indices[-1]
    if up:
        data[pt_indices[-1]:] = 0.8 * np.arange(0.0, n) / n
    else:
        data[pt_indices[-1]:] = 1.0 - 0.8 * np.arange(0.0, n) / n
    up = not up
    data += -0.025 * time * (time - 10.0)
    peak_times = time[peak_indices]
    trough_times = time[trough_indices]
    threshold = 0.5
    min_thresh = 0.3

    assert_raises(ValueError, ed.detect_peaks, data, 0.0)
    assert_raises(ValueError, ed.detect_peaks, data, -1.0)

    peaks, troughs = ed.detect_peaks(data, threshold)
    assert_true(np.all(peaks == peak_indices),
                "detect_peaks(data, threshold) did not correctly detect peaks")
    assert_true(np.all(troughs == trough_indices),
                "detect_peaks(data, threshold) did not correctly detect troughs")
def analyze_pulse_data(filepath, deltat=10, thresh=0.04, starttime=0, endtime=0,
                       savepath=False, save=False, npmmp=False,
                       plot_steps=False, plot_result=False):
    """
    Analyzes the timeseries of a pulse fish EOD recording.

    Parameters
    ----------
    filepath: string
        WAV-file with the recorded timeseries.
    deltat: int, optional
        Duration of a single analysis block in seconds (recommended less than a
        minute, due to the principal component clustering on the EOD waveforms).
    thresh: float, optional
        Minimum threshold for the peak detection. If you compute frequencies,
        it is recommended to set this a tiny bit lower than the desired
        threshold and instead discard the EODs below the desired threshold
        after the frequencies have been computed for each EOD.
    starttime: int or str of int, optional
        Time into the data at which to start the analysis, in seconds.
    endtime: int or str of int, optional
        Time into the data at which to end the analysis, in seconds; must be
        larger than starttime.
    savepath: Boolean or str, optional
        Path to where results and intermediate results are saved, only needed
        if save or npmmp is True. A string specifies a relative path to the
        directory where results and intermediate results will be stored, False
        uses the preset savepath, which is ~/filepath/, and True lets you
        specify the savepath as input while the script is running.
    save: Boolean, optional
        True to save the results into a npy file at the savepath.
    npmmp: Boolean, optional
        True to save intermediate results into a numpy memmap at the savepath,
        only recommended in case of memory overflow.
    plot_steps: Boolean, optional
        True to plot the results of each analysis block.
    plot_result: Boolean, optional
        True to plot the results of the final analysis. Not recommended for
        long recordings due to %TODO

    Returns
    -------
    eods: numpy array
        2D numpy array. First axis: attributes of an EOD
        (x (datapoints), y (recorded voltage), height (difference from maximum
        to minimum), class), second axis: EODs in chronological order.
    """
    # parameters for the analysis
    thresh = 0.04   # minimal threshold for peak detection
    peakwidth = 20  # width of a peak and minimal distance between two EODs
    # basic parameters for thunderfish.dataloader.open_data
    verbose = 0
    channel = 0
    ultimate_threshold = thresh + 0.01
    startblock = 0
    starttime = int(starttime)
    endtime = int(endtime)
    timegiven = False
    if endtime > starttime >= 0:
        timegiven = True
    peaks = np.array([])
    troughs = np.array([])
    filename = path_leaf(filepath)
    eods_len = 0
    if savepath == False:
        datasavepath = filename[:-4]
    elif savepath == True:
        datasavepath = input('With the option npmmp enabled, a numpy memmap will be saved to: ').lower()
    else:
        datasavepath = savepath

    if save and (os.path.exists(datasavepath + "/eods8_" + filename[:-3] + "npy")
                 or os.path.exists(datasavepath + "/eods5_" + filename[:-3] + "npy")):
        print('there already exists an analyzed file, aborting. Change the code if you don\'t want to abort')
        quit()
    if npmmp:
        #proceed = input('With the option npmmp enabled, a numpy memmap will be saved to ' + datasavepath + '. continue? [y/n] ').lower()
        proceed = 'y'
        if proceed != 'y':
            quit()

    # starting analysis
    with open_data(filepath, channel, deltat, 0.0, verbose) as data:
        samplerate = data.samplerate

        # selected time interval
        if timegiven == True:
            parttime1 = starttime * samplerate
            parttime2 = endtime * samplerate
            data = data[parttime1:parttime2]

        # split data into blocks
        nblock = int(deltat * samplerate)
        if len(data) % nblock != 0:
            blockamount = len(data) // nblock + 1
        else:
            blockamount = len(data) // nblock
        print('blockamount: ', blockamount)
        progress = 0
        print(progress, '%', flush=True, end=" ")
        #fish = ProgressFish(total=blockamount)
        pca_cur = 0

        for idx in range(0, blockamount):
            blockdata = data[idx * nblock:(idx + 1) * nblock]
            if progress < (idx * 100 // blockamount):
                progress = (idx * 100) // blockamount
                progressstr = ' Filestatus: '
                #fish.animate(amount=idx, dexextra=progressstr)

            # delete peaks under absolute threshold
            #thresh_array = create_threshold_array(blockdata, 30000, thresh)
            pk, tr = detect_peaks(blockdata, thresh)
            troughs = tr
            if len(pk) > 3:
                peaks = makeeventlist(pk, tr, blockdata, peakwidth)
                peakindices, peakx, peakh = discardnearbyevents(peaks[0], peaks[1], peakwidth)
                peaks = peaks[:, peakindices]
                if len(peaks) > 0:
                    #if idx > startblock:
                    #    # adding a new block as copy of old list, only difference is peak indexing as it refers to last block
                    #    peaklist = connect_blocks(peaklist)
                    #else:
                    #    peaklist = Peaklist([])
                    aligned_snips, snip_heights = cut_snippets(blockdata, peaks[0], 30,
                                                               int_met="cubic", int_fact=10,
                                                               max_offset=20)
                    pols = chebyshev(aligned_snips)
                    feats = np.zeros((pols.shape[0], pols.shape[1] + 1))
                    feats[:, :6] = pols
                    feats[:, -1] = snip_heights * 0.1
                    #pcs, pca_cur = pc(aligned_snips)  #pc_refactor(aligned_snips)
                    minpeaks = 3 if deltat < 2 else 10
                    labels, clusters = cluster_events(feats, peaks, 0.1, minpeaks, False, method='DBSCAN')
                    peaks = np.append(peaks, [labels], axis=0)
                    if idx > startblock:
                        # instead of the peaklist I would have to add the previous cluster means
                        # alignclusterlabels(labels, peaklist, peaks, data=blockdata)
                        print("maxlabel")
                        print(maxlabel)
                        peaks[-1] = alignlabels(labels, clusters, old_labels, old_clusters, maxlabel)
                    old_labels = np.unique(peaks[-1])
                    old_clusters = clusters
                    # I would want peaks updated here to have the right pc classes as well..
                    #peaks, peaklist = ampwalkclassify3_refactor(peaks, peaklist)  # classification by amplitude
                    minlen = 5
                    peaks = discard_short_classes(peaks, minlen)
                    if len(peaks[0]) > 0:
                        peaks = discard_wave_pulses(peaks, blockdata)
                        # delete peaks under absolute threshold
                        #thresh_array = create_threshold_array(blockdata, 30000)
                        #peaks = peaks[:, peaks[1] > thresh_array[list(map(int, peaks[0]))]]
                    if plot_steps == True:
                        plot_events_on_data(peaks, blockdata)
                        pass
                    cmap = plt.get_cmap('jet')
                    colors = cmap(np.linspace(0, 1.0, 6))
                    for lab, color in zip(np.unique(labels), colors):
                        if lab == -1:
                            c = 'k'
                            z = -1
                        else:
                            c = color
                            z = 1
                        plt.plot(range(aligned_snips.shape[1]),
                                 np.transpose(aligned_snips[labels == lab]),
                                 color=c, zorder=z, label=lab)
                    plt.xlabel('time [ms]')
                    plt.ylabel('signal')
                    # use separate names for the legend entries so the cluster
                    # label array is not overwritten
                    leg_handles, leg_labels = plt.gca().get_legend_handles_labels()
                    by_label = OrderedDict(zip(leg_labels, leg_handles))
                    plt.legend(by_label.values(), by_label.keys())
                    #plt.legend()
                    plt.show()

                    print(feats.shape)
                    for lab, color in zip(np.unique(labels), colors):
                        if lab == -1:
                            c = 'k'
                            z = -1
                        else:
                            c = color
                            z = 1
                        plt.plot(range(feats.shape[1]),
                                 np.transpose(feats[labels == lab]),
                                 color=c, zorder=z, label=lab)
                    plt.xlabel('time [ms]')
                    plt.ylabel('signal')
                    leg_handles, leg_labels = plt.gca().get_legend_handles_labels()
                    by_label = OrderedDict(zip(leg_labels, leg_handles))
                    plt.legend(by_label.values(), by_label.keys())
                    #plt.legend()
                    plt.show()

                    #peaklist.len = nblock
                    worldpeaks = np.copy(peaks)
                    worldpeaks[0] = worldpeaks[0] + (idx * nblock)
                    # delete the classification that only considers wave shape.
                    #thisblock_eods = np.delete(worldpeaks, 3, 0)
                    thisblock_eods = worldpeaks
                    if idx == startblock:
                        maxlabel = np.max(peaks[-1]) + 1
                    else:
                        print("new max candidate")
                        print(np.max(peaks[-1]) + 1)
                        maxlabel = np.max([maxlabel, (np.max(peaks[-1]) + 1)])
                    if npmmp:
                        if idx == startblock:
                            if not os.path.exists(datasavepath):
                                os.makedirs(datasavepath)
                            mmpname = "eods_" + filename[:-3] + "npmmp"
                        # save the peaks of the current buffered part to a numpy-memmap on the disk
                        save_EOD_events_to_npmmp(thisblock_eods, eods_len, idx == startblock,
                                                 datasavepath, mmpname)
                        eods_len += len(thisblock_eods[0])
                    else:
                        if idx > 0:
                            all_eods = np.concatenate((all_eods, thisblock_eods), axis=1)
                        else:
                            all_eods = thisblock_eods

    #plot_events_on_data(all_eods, data)
    print('returns analyzed EODs. Calculate frequencies using all of these but discard the data from the EODs within the lowest few percent of amplitude')
    if npmmp:
        all_eods = np.memmap(datasavepath + '/' + mmpname, dtype='float64',
                             mode='r+', shape=(4, eods_len), order='F')
    if save == 1:
        path = filename[:-4] + "/"
        if not os.path.exists(path):
            os.makedirs(path)
        if eods_len > 0:
            np.save(datasavepath + "/eods8_" + filename[:-3] + "npy", all_eods)
            print('Saved!')
        else:
            print('not saved')
    return all_eods
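# A minimal, hedged usage sketch for analyze_pulse_data (not part of the original
# code). 'recording.WAV' is a hypothetical file name; with save=False and
# npmmp=False nothing is written to disk and the EOD array is only returned.
def _example_analyze_pulse_data():
    eods = analyze_pulse_data('recording.WAV', deltat=10, thresh=0.04,
                              save=False, npmmp=False, plot_steps=False)
    # per the docstring: rows are x (sample index), y (voltage), height, class;
    # columns are the EODs in chronological order
    print('detected EODs:', eods.shape[1])
    return eods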
def analyze_long_pulse_data_file(filepath, save=0, plot_steps=0, new=1, starttime=0, endtime=0):
    """
    Analyzes the timeseries of a pulse fish EOD recording.
    """
    # Script to detect and classify EODs in recordings of weakly electric pulse
    # fish, Dexter Früh, 2018
    #
    # results will be saved in workingdirectory/recording/
    #
    # input:
    #   - [Recorded Timeseries] recording.WAV
    # outputs (optional):
    #   - [Detected and Classified EODs]
    #     (Numpy array with shape (number of EODs, 4 (attributes of EODs)),
    #     with the EOD attributes
    #       - x-location of the EOD
    #         (time/x-coordinate/datapoint in recording)
    #       - y-location of the EOD
    #         (amplitude of the positive peak of the pulse-EOD)
    #       - height of the EOD (largest distance between peak and trough in the EOD)
    #       - class of the EOD
    #     eods_recording.npy
    #   - [plots of the results of each analysis step for each
    #     analysis part (time interval of length deltat) of the recording]
    #
    # required command line arguments at function call:
    #   - save : if True, save the results to a numpy file (possibly
    #            overwrite existing)
    #   - plot : if True, plot results in each analysis step
    #   - new  : if True, do a new analysis of the recording, even if there
    #            is an existing analyzed .npy file with the right name.

    # parameters for the analysis
    deltat = 30.0   # seconds of buffer size
    thresh = 0.04   # minimal threshold for peak detection
    peakwidth = 20  # width of a peak and minimal distance between two EODs
    # basic parameters for thunderfish.dataloader.open_data
    verbose = 0
    channel = 0
    ultimate_threshold = thresh + 0.01
    startblock = 0
    # time interval to analyze other than the whole recording
    #starttime = 0
    #endtime = 0
    #timegiven = 0
    home = os.path.expanduser('~')
    os.chdir(home)
    new = int(sys.argv[4])
    save = int(sys.argv[2])
    plot = int(sys.argv[3])
    starttime = int(starttime)
    endtime = int(endtime)
    timegiven = False
    if endtime > starttime >= 0:
        timegiven = True
    peaks = np.array([])
    troughs = np.array([])
    filename = path_leaf(filepath)
    datasavepath = filename[:-4]
    proceed = input('Currently operates in home directory. If given a pulsefish recording filename.WAV, then a folder filename/ will be created in the home directory and all relevant files will be stored there. continue? [y/n] ').lower()
    if proceed != 'y':
        quit()
    if not os.path.exists(datasavepath):
        os.makedirs(datasavepath)
    if save == 1:
        print('files will be saved to: ', datasavepath)
    eods_len = 0

    # starting analysis
    if new == 1 or not os.path.exists(filename[:-4] + "/eods5_" + filename[:-3] + "npy"):
        if filepath != home + '/' + datasavepath + '/' + filename:
            print(filepath, datasavepath + '/' + filename)
            proceed = input('Copy datafile to ' + datasavepath + ' where all the other files will be stored? [y/n] ').lower()
            if proceed == 'y':
                copy2(filepath, datasavepath)

        # import data
        with open_data(filepath, channel, deltat, 0.0, verbose) as data:
            samplerate = data.samplerate
            nblock = int(deltat * data.samplerate)

            # selected time interval
            if timegiven == True:
                parttime1 = starttime * samplerate
                parttime2 = endtime * samplerate
                data = data[parttime1:parttime2]

            # split data into blocks
            if len(data) % nblock != 0:
                blockamount = len(data) // nblock + 1
            else:
                blockamount = len(data) // nblock

            # progress bar
            print('blockamount: ', blockamount)
            progress = 0
            print(progress, '%', flush=True, end=" ")
            #fish = ProgressFish(total=blockamount)

            # blockwise analysis
            for idx in range(0, blockamount):
                blockdata = data[idx * nblock:(idx + 1) * nblock]
                # progressbar
                if progress < (idx * 100 // blockamount):
                    progress = (idx * 100) // blockamount
                    progressstr = ' Filestatus: '
                    #fish.animate(amount=idx, dexextra=progressstr)

                # ---analysis-------------------------------------------------
                # step 1: detect peaks in timeseries
                pk, tr = detect_peaks(blockdata, thresh)
                troughs = tr
                # continue with analysis only if multiple peaks are detected
                if len(pk) > 3:
                    peaks = makeeventlist(pk, tr, blockdata, peakwidth)
                    #plot_events_on_data(peaks, blockdata)
                    peakindices, peakx, peakh = discardnearbyevents(peaks[0], peaks[1], peakwidth)
                    peaks = peaks[:, peakindices]
                    if len(peaks) > 0:
                        # used to connect the results of the current block with the previous
                        if idx > startblock:
                            peaklist = connect_blocks(peaklist)
                        else:
                            peaklist = Peaklist([])
                        aligned_snips = cut_snippets(blockdata, peaks[0], 15,
                                                     int_met="cubic", int_fact=10, max_offset=1.5)
                        pcs = pc(aligned_snips)  #pc_refactor(aligned_snips)
                        order = 5
                        minpeaks = 3 if deltat < 2 else 10
                        labels = cluster_events(pcs, peaks, order, 0.4, minpeaks, False, method='DBSCAN')
                        peaks = np.append(peaks, [labels], axis=0)
                        #plot_events_on_data(peaks, blockdata)
                        num = 1
                        if idx > startblock:
                            alignclusterlabels(labels, peaklist, peaks, data=blockdata)
                        peaks, peaklist = ampwalkclassify3_refactor(peaks, peaklist)  # classification by amplitude
                        minlen = 6  # >= 1
                        peaks = discard_short_classes(peaks, minlen)
                        if len(peaks[0]) > 0:
                            peaks = discard_wave_pulses(peaks, blockdata)
                        # plots the data part and its detected and classified peaks
                        if plot_steps == True:
                            plot_events_on_data(peaks, blockdata)
                            pass
                        worldpeaks = np.copy(peaks)
                        # change peak locations in the buffered part to locations relative to the whole recording
                        peaklist.len = nblock
                        worldpeaks[0] = worldpeaks[0] + (idx * nblock)
                        thisblock_eods = np.delete(peaks, 3, 0)
                        # save the peaks of the current buffered part to a numpy-memmap on the disk
                        mmpname = "eods_" + filename[:-3] + "npmmp"
                        save_EOD_events_to_npmmp(thisblock_eods, eods_len, idx == startblock,
                                                 datasavepath, mmpname)
                        eods_len += len(thisblock_eods[0])

        # after the last buffered part has finished, save the memory-mapped
        # numpy file of the detected and classified EODs to a .npy file on the disk
        eods = np.memmap(datasavepath + "/eods_" + filename[:-3] + "npmmp",
                         dtype='float64', mode='r+', shape=(4, eods_len), order='F')
        if save == 1:
            path = datasavepath + "/"
            if not os.path.exists(path):
                os.makedirs(path)
            if eods_len > 0:
                print('Saved!')
                np.save(datasavepath + "/eods8_" + filename[:-3] + "npy", eods)
            else:
                #np.save(filename[:-4] + "/eods5_" + filename[:-3] + "npy", thisblock_eods)
                print('not saved')
    else:
        # if there already is an existing result file and 'new' was set to False
        print('already analyzed')

    print('returns analyzed EODs. Calculate frequencies using all of these but discard the data from the EODs within the lowest few percent of amplitude')
    return eods
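# Hedged invocation sketch (not part of the original code). Because
# analyze_long_pulse_data_file() also reads save, plot and new from sys.argv,
# it is apparently meant to be driven from a command line roughly like
#   python <script>.py <recording.WAV> <save> <plot> <new>
# The argument order is inferred from the sys.argv indices used above; the
# script and file names are hypothetical.
def _run_analyze_long_pulse_from_cli():
    return analyze_long_pulse_data_file(sys.argv[1],
                                        save=int(sys.argv[2]),
                                        plot_steps=int(sys.argv[3]),
                                        new=int(sys.argv[4]))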
def analyze_pulse_data(filepath, deltat=30, thresh=0.04, starttime=0, endtime=0,
                       savepath=False, save=False, npmmp=False,
                       plot_steps=False, plot_result=False):
    """
    Analyzes the timeseries of a pulse fish EOD recording.

    Parameters
    ----------
    filepath: string
        WAV-file with the recorded timeseries.
    deltat: int, optional
        Duration of a single analysis block in seconds (recommended less than a
        minute, due to the principal component clustering on the EOD waveforms).
    thresh: float, optional
        Minimum threshold for the peak detection. If you compute frequencies,
        it is recommended to set this a tiny bit lower than the desired
        threshold and instead discard the EODs below the desired threshold
        after the frequencies have been computed for each EOD.
    starttime: int or str of int, optional
        Time into the data at which to start the analysis, in seconds.
    endtime: int or str of int, optional
        Time into the data at which to end the analysis, in seconds; must be
        larger than starttime.
    savepath: Boolean or str, optional
        Path to where results and intermediate results are saved, only needed
        if save or npmmp is True. A string specifies a relative path to the
        directory where results and intermediate results will be stored, False
        uses the preset savepath, which is ~/filepath/, and True lets you
        specify the savepath as input while the script is running.
    save: Boolean, optional
        True to save the results into a npy file at the savepath.
    npmmp: Boolean, optional
        True to save intermediate results into a numpy memmap at the savepath,
        only recommended in case of memory overflow.
    plot_steps: Boolean, optional
        True to plot the results of each analysis block.
    plot_result: Boolean, optional
        True to plot the results of the final analysis. Not recommended for
        long recordings due to %TODO

    Returns
    -------
    eods: numpy array
        2D numpy array. First axis: attributes of an EOD
        (x (datapoints), y (recorded voltage), height (difference from maximum
        to minimum), class), second axis: EODs in chronological order.
    """
    # parameters for the analysis
    thresh = 0.04   # minimal threshold for peak detection
    peakwidth = 20  # width of a peak and minimal distance between two EODs
    # basic parameters for thunderfish.dataloader.open_data
    verbose = 0
    channel = 0
    ultimate_threshold = thresh + 0.01
    startblock = 0
    # time interval to analyze other than the whole recording
    #starttime = 0
    #endtime = 0
    #timegiven = 0
    #save = int(save)
    #plot_steps = int(plot_steps)
    starttime = int(starttime)
    endtime = int(endtime)
    timegiven = False
    if endtime > starttime >= 0:
        timegiven = True
    peaks = np.array([])
    troughs = np.array([])
    filename = path_leaf(filepath)
    eods_len = 0
    if savepath == False:
        datasavepath = filename[:-4]
    elif savepath == True:
        datasavepath = input('With the option npmmp enabled, a numpy memmap will be saved to: ').lower()
    else:
        datasavepath = savepath

    if save and (os.path.exists(datasavepath + "/eods8_" + filename[:-3] + "npy")
                 or os.path.exists(datasavepath + "/eods5_" + filename[:-3] + "npy")):
        print('there already exists an analyzed file, aborting. Change the code if you don\'t want to abort')
        quit()
    if npmmp:
        #proceed = input('With the option npmmp enabled, a numpy memmap will be saved to ' + datasavepath + '. continue? [y/n] ').lower()
        proceed = 'y'
        if proceed != 'y':
            quit()

    # starting analysis
    with open_data(filepath, channel, deltat, 0.0, verbose) as data:
        samplerate = data.samplerate

        # selected time interval
        if timegiven == True:
            parttime1 = starttime * samplerate
            parttime2 = endtime * samplerate
            data = data[parttime1:parttime2]

        # split data into blocks
        nblock = int(deltat * samplerate)
        if len(data) % nblock != 0:
            blockamount = len(data) // nblock + 1
        else:
            blockamount = len(data) // nblock
        print('blockamount: ', blockamount)
        progress = 0
        print(progress, '%', flush=True, end=" ")
        #fish = ProgressFish(total=blockamount)

        for idx in range(0, blockamount):
            blockdata = data[idx * nblock:(idx + 1) * nblock]
            if progress < (idx * 100 // blockamount):
                progress = (idx * 100) // blockamount
                progressstr = ' Filestatus: '
                #fish.animate(amount=idx, dexextra=progressstr)

            pk, tr = detect_peaks(blockdata, thresh)
            troughs = tr
            if len(pk) > 3:
                peaks = makeeventlist(pk, tr, blockdata, peakwidth)
                peakindices, peakx, peakh = discardnearbyevents(peaks[0], peaks[1], peakwidth)
                peaks = peaks[:, peakindices]
                if len(peaks) > 0:
                    if idx > startblock:
                        peaklist = connect_blocks(peaklist)
                    else:
                        peaklist = Peaklist([])
                    aligned_snips = cut_snippets(blockdata, peaks[0], 15,
                                                 int_met="cubic", int_fact=10, max_offset=1.5)
                    print(aligned_snips.shape)
                    pcs = pc(aligned_snips)  #pc_refactor(aligned_snips)
                    order = 5
                    minpeaks = 3 if deltat < 2 else 10
                    labels = cluster_events(pcs, peaks, order, 0.4, minpeaks, False, method='DBSCAN')
                    peaks = np.append(peaks, [labels], axis=0)
                    #plot_events_on_data(peaks, blockdata)
                    num = 1
                    if idx > startblock:
                        alignclusterlabels(labels, peaklist, peaks, data=blockdata)
                    peaks, peaklist = ampwalkclassify3_refactor(peaks, peaklist)  # classification by amplitude
                    minlen = 6
                    peaks = discard_short_classes(peaks, minlen)
                    if len(peaks[0]) > 0:
                        peaks = discard_wave_pulses(peaks, blockdata)
                    if plot_steps == True:
                        plot_events_on_data(peaks, blockdata)
                        pass
                    peaklist.len = nblock
                    worldpeaks = np.copy(peaks)
                    worldpeaks[0] = worldpeaks[0] + (idx * nblock)
                    thisblock_eods = np.delete(worldpeaks, 3, 0)
                    if npmmp:
                        if idx == startblock:
                            if not os.path.exists(datasavepath):
                                os.makedirs(datasavepath)
                            mmpname = "eods_" + filename[:-3] + "npmmp"
                        # save the peaks of the current buffered part to a numpy-memmap on the disk
                        save_EOD_events_to_npmmp(thisblock_eods, eods_len, idx == startblock,
                                                 datasavepath, mmpname)
                        eods_len += len(thisblock_eods[0])
                    else:
                        if idx > 0:
                            all_eods = np.concatenate((all_eods, thisblock_eods), axis=1)
                        else:
                            all_eods = thisblock_eods

    #plot_events_on_data(all_eods, data)
    print('returns analyzed EODs. Calculate frequencies using all of these but discard the data from the EODs within the lowest few percent of amplitude')
    if npmmp:
        all_eods = np.memmap(datasavepath + '/' + mmpname, dtype='float64',
                             mode='r+', shape=(4, eods_len), order='F')
    if save == 1:
        path = filename[:-4] + "/"
        if not os.path.exists(path):
            os.makedirs(path)
        if eods_len > 0:
            np.save(datasavepath + "/eods8_" + filename[:-3] + "npy", all_eods)
            print('Saved!')
        else:
            print('not saved')
    return all_eods
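# Hedged follow-up sketch (not part of the original code): one way to turn the
# returned eods array into per-class EOD frequencies, as suggested by the print
# statement above. It assumes the documented row layout (x in samples, y,
# height, class) and a known samplerate; the 5% amplitude cut-off is an
# arbitrary example value.
def eod_frequencies_per_class(eods, samplerate, discard_percentile=5.0):
    freqs = {}
    heights = eods[2]
    # discard the weakest few percent of EODs before estimating frequencies
    keep = heights >= np.percentile(heights, discard_percentile)
    kept = eods[:, keep]
    for cl in np.unique(kept[-1]):
        x = np.sort(kept[0, kept[-1] == cl])
        if len(x) > 1:
            # median inter-pulse interval converted to a rate in Hz
            freqs[cl] = samplerate / np.median(np.diff(x))
    return freqs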
def detect_calls(self, det_range=(50000, 180000), th_between_calls=0.004,
                 plot_debug=False, plot_in_spec=False, save_spec_w_calls=False):
    # Get an average over all frequency channels within detection range
    av_power = np.mean(self.spec_mat[np.logical_and(self.f > det_range[0],
                                                    self.f < det_range[1])], axis=0)

    th = np.min(av_power)  # THIS THRESHOLD ROCKS YOUR PANTS! for more detections, increase f_res. 2^7 or 2^8
    if th <= 0:  # Fix cases where th <= 0
        th = np.mean(av_power)
    peaks, _ = detect_peaks(av_power, th)  # Use thunderfish's peak-trough algorithm

    # clean pks that might be echoes
    below_t_th = np.diff(self.t[peaks]) < th_between_calls

    if len(np.where(below_t_th)[0]) == 0:
        cleaned_peaks = peaks
    else:
        cleaned_peaks = np.delete(peaks, np.where(below_t_th)[0])

    if plot_debug:
        fig, ax = plt.subplots()
        ax.plot(self.t, av_power)
        ax.plot(self.t[cleaned_peaks], np.ones(len(cleaned_peaks)) * np.max(av_power),
                'o', ms=10, color='darkred', alpha=.8, mec='k', mew=1.5)
        ax.plot([self.t[0], self.t[-1]], [th, th], '--k', lw=2.5)
        # plt.show()

    if plot_in_spec:
        spec_fig, spec_ax = self.plot_spectrogram(spec_mat=self.spec_mat, f_arr=self.f,
                                                  t_arr=self.t, ret_fig_and_ax=True,
                                                  showit=False)
        spec_ax = spec_ax[0]
        spec_ax.plot(self.t[cleaned_peaks], np.ones(len(cleaned_peaks)) * 80,
                     'o', ms=10,  # plots the detection at 80kHz
                     color='darkred', alpha=.8, mec='k', mew=1.5)
        spec_fig.suptitle(self.file_name.split('.')[0])

        if save_spec_w_calls:
            spec_fig.savefig('test_result/detected_calls/' + self.file_name.split('.')[0] + '.pdf')

    return av_power, cleaned_peaks
mainHarmonicTrace = []
db_th = 15.0
f_tol_th = 40000   # in Hz
t_tol_th = 0.0012  # in s

freq_tolerance = np.where(np.cumsum(np.diff(freqs_of_filtspec)) > f_tol_th)[0][0]
time_tolerance = np.where(np.cumsum(np.diff(t)) > t_tol_th)[0][0]

# first start from peak to right
f_ref = peak_f_idx[0]
t_ref = peak_f_idx[1]
mainHarmonicTrace.append([peak_f_idx[0], peak_f_idx[1]])

for ri in right_from_pk:
    pi, _ = detect_peaks(filtered_spec[:, ri], db_th)
    pi = pi[filtered_spec[pi, ri] > lowest_decibel]

    if len(pi) > 0:
        curr_f = pi[np.argmin(np.abs(f_ref - pi))]
        if np.abs(ri - t_ref) > time_tolerance or np.abs(curr_f - f_ref) > freq_tolerance \
                or f_ref - curr_f < 0:
            continue
        else:
            mainHarmonicTrace.append([curr_f, ri])
            f_ref = curr_f
            t_ref = ri
    else:
        continue

# Now from peak to left
def extract_peak_and_th_crossings_from_cumhist(mat, axis, label_array, perc_th=70,
                                               neg_sweep_slope=True, plot_debug=False):
    av = np.mean(mat, axis=axis)  # mean over all frequency channels
    abs_av = av - np.min(av)  # make all values positive for the peak-det-algorithm to work
    perc = np.percentile(abs_av, perc_th)

    # if axis == 1:  # ToDo: need a cleaner way to solve the artifacts issue in my recordings
    #     abs_av[np.logical_or((label_array < 98000.), (label_array > 159000.))] = 0.

    thresh = np.min(abs_av)  # threshold for the peak-detector
    if thresh <= 0:  # Fix cases where th <= 0
        thresh = np.mean(abs_av)
    pks, trs = detect_peaks(abs_av, thresh)

    if len(pks) == 0:
        return [], []

    # since more than one peak might be detected, need to choose the one with the highest power
    mx_pk = pks[np.argmax(abs_av[pks])]

    crossings = np.where(np.diff(abs_av > perc))[0]  # gives the crossings where abs_av > perc_th

    # now I extract the sign of crossing differences to the peak. 0 marks the right crossings
    sign_to_pk = np.sign(label_array[crossings] - label_array[mx_pk])

    # look for the crossings pair where the peak is in the middle of both
    try:
        call_crossing_idx = np.where(sign_to_pk[:-1] + sign_to_pk[1:] == 0)[0][0]
    except IndexError:
        embed()
        quit()

    call_boundaries = crossings[call_crossing_idx:call_crossing_idx + 2]

    if plot_debug:
        fig, ax = plt.subplots()
        ax.plot(label_array, abs_av)
        ax.plot(label_array[mx_pk], abs_av[mx_pk], 'or', ms=12, mec='k', mew=1.5, alpha=0.7)
        ax.plot([label_array[0], label_array[-1]], [perc, perc], '-k', alpha=0.8)
        ax.plot(label_array[call_boundaries], abs_av[call_boundaries],
                'o', color='gray', ms=20, mec='k', mew=2, alpha=.7)

    if np.logical_and(axis == 1, neg_sweep_slope):
        return mx_pk, call_boundaries[::-1]
    else:
        return mx_pk, call_boundaries