def __init__(self, **traits):
    super(PanTomkinsDetector, self).__init__(**traits)
    self.ecg_ts = TimeSeries(physiodata=self.physiodata, contains="ecg")

    # relevant data to be plotted
    self.ecg_time = self.ecg_ts.time
    self.can_use_ecg2 = "ecg2" in self.physiodata.contents

    # Which ECG signal to use?
    if self.qrs_source_signal == "ecg" or not self.can_use_ecg2:
        self.ecg_signal = normalize(self.ecg_ts.data)
        if self.can_use_ecg2:
            self.ecg2_signal = normalize(self.physiodata.ecg2_data)
        else:
            self.ecg2_signal = None
    else:
        self.ecg_signal = normalize(self.physiodata.ecg2_data)
        self.ecg2_signal = normalize(self.physiodata.ecg_data)

    self.thr_times = self.ecg_time[np.array([0, -1])]
    self.censored_intervals = self.physiodata.censored_intervals

    # graphics containers
    self.aux_window_graphics = []
    self.censored_region_graphics = []

    # If peaks are already available in the mea.mat,
    # they have already been marked
    if len(self.peak_times):
        self.dirty = False
        self.peak_values = self.ecg_signal[self.peak_indices]
        if self.dne_peak_indices is not None and len(self.dne_peak_indices):
            self.dne_peak_values = self.ecg_signal[self.dne_peak_indices]
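
# `normalize` is imported from the surrounding package, not defined in this
# file. The sketch below is an assumption about its behavior (min-max scaling
# of a 1-D signal into [0, 1]); the package's actual implementation may differ.
def _minmax_normalize_sketch(signal):
    """Hypothetical stand-in illustrating what `normalize` is assumed to do."""
    signal = np.asarray(signal, dtype=float)
    return (signal - signal.min()) / (signal.max() - signal.min())
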
def _get_qrs_power_signal(self):
    if self.apply_filter:
        filtered_ecg = normalize(
            bandpass(self.ecg_signal, self.bandpass_min, self.bandpass_max,
                     self.ecg_ts.sampling_rate))
    else:
        filtered_ecg = self.ecg_signal

    # Differentiate and square the signal?
    if self.apply_diff_sq:
        # Differentiate the signal and square it
        diff_sq = np.ediff1d(filtered_ecg, to_begin=0)**2
    else:
        diff_sq = filtered_ecg

    # If we're to apply a moving average smoothing
    if self.apply_smooth_ma:
        # MA smoothing
        smooth_ma = smooth(diff_sq, window_len=self.smoothing_window_len,
                           window=self.smoothing_window)
    else:
        smooth_ma = diff_sq

    if not self.use_ECG2:
        # for visualization purposes
        return normalize(smooth_ma)

    logger.info("Using 2nd ECG signal")
    # Use secondary ECG signal and combine QRS power
    if self.apply_filter:
        filtered_ecg2 = normalize(
            bandpass(self.ecg2_signal, self.bandpass_min, self.bandpass_max,
                     self.ecg_ts.sampling_rate))
    else:
        filtered_ecg2 = self.ecg2_signal
    if self.apply_diff_sq:
        diff_sq2 = np.ediff1d(filtered_ecg2, to_begin=0)**2
    else:
        diff_sq2 = filtered_ecg2
    if self.apply_smooth_ma:
        smooth_ma2 = smooth(diff_sq2, window_len=self.smoothing_window_len,
                            window=self.smoothing_window)
    else:
        smooth_ma2 = diff_sq2

    return normalize(((1 - self.ecg2_weight) * smooth_ma +
                      self.ecg2_weight * smooth_ma2)**2)
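
# `smooth` (taking `window_len` and `window` keywords) is also imported from
# the package. The sketch below is an assumption about what it does: convolve
# the signal with a normalized window, SciPy-cookbook style. The real helper
# may handle edges differently.
def _smooth_sketch(x, window_len=11, window="hanning"):
    """Hypothetical moving-window smoother, for illustration only.

    Assumes len(x) > window_len.
    """
    x = np.asarray(x, dtype=float)
    if window == "flat":
        # plain moving average
        w = np.ones(window_len)
    else:
        # e.g. np.hanning, np.hamming, np.blackman
        w = getattr(np, window)(window_len)
    # reflect the signal at both ends so the output keeps the input length
    padded = np.r_[x[window_len - 1:0:-1], x, x[-2:-window_len - 1:-1]]
    y = np.convolve(w / w.sum(), padded, mode="valid")
    return y[(window_len // 2):(window_len // 2) + len(x)]
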
def _get_aux_signal(self):
    if not self.use_secondary_heartbeat:
        return np.array([])
    sig = getattr(self.physiodata, self.secondary_heartbeat + "_data")
    if self.secondary_heartbeat_abs:
        sig = np.abs(sig)
    if self.secondary_heartbeat_window_len > 0:
        sig = smooth(sig, window_len=self.secondary_heartbeat_window_len,
                     window=self.secondary_heartbeat_window)
    return normalize(sig)
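
# `find_peaks` (used in detect() below) is another package-level helper. It is
# assumed here to return the indices of local maxima in a 1-D signal; the
# sketch below is hypothetical and only illustrates that assumption.
def _find_peaks_sketch(signal):
    """Hypothetical local-maximum finder returning peak indices."""
    signal = np.asarray(signal, dtype=float)
    d = np.diff(signal)
    # a sample is a peak when the slope goes from positive to non-positive
    return np.flatnonzero((d[:-1] > 0) & (d[1:] <= 0)) + 1
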
def compute_slicewise_regressors(self, N_BINS=100):
    bold = nib.load(self.fmri_file)
    if self.acquisition_type == "Coronal":
        expected_slices = bold.shape[1]
    elif self.acquisition_type == "Axial":
        expected_slices = bold.shape[0]
    elif self.acquisition_type == "Saggittal":
        expected_slices = bold.shape[2]

    # Check that the number of slice timings matches the matrix
    if not (self.slice_times_matrix.ndim == 2 and
            expected_slices == self.slice_times_matrix.shape[1]):
        messagebox("Acquisition of type %s requires %d slices. "
                   "%d slice times were provided" % (
                       self.acquisition_type, expected_slices,
                       self.slice_times_matrix.shape[1]))
        raise ValueError

    # Only needed when slice_times_matrix is manually specified
    num_trs = bold.shape[3]
    if self.slice_times_matrix.shape[0] < num_trs:
        messagebox("Not enough slice timings to cover the 4d time series")
        raise ValueError
    if self.slice_times_matrix.shape[0] > num_trs:
        self.slice_times_matrix = self.slice_times_matrix[:num_trs, :]

    # Map the respiration signal through its empirical CDF to get a
    # respiration phase, signed by the direction of breathing
    resp_hist, bins = np.histogram(self.resp_signal, N_BINS)
    resp_transfer_func = np.concatenate(
        [[0], np.cumsum(resp_hist) / float(resp_hist.sum())])
    resp_diff = np.ediff1d(self.resp_signal, to_begin=0)
    resp_phase = np.pi * resp_transfer_func[np.round(
        normalize(self.resp_signal) * N_BINS).astype(int)] * np.sign(resp_diff)

    # Make detrending regressors
    detrend_model = np.row_stack([
        legendre(x)(np.linspace(-1, 1, self.slice_times_matrix.shape[0]))
        for x in range(self.drift_model_order)
    ])
    detrend_columns = [
        "poly%02d" % polynum for polynum in range(self.drift_model_order)
    ]

    # Build the regressors for each slice: at what cardiac and
    # respiratory phase did each TR occur?
    regressors = []
    for slicenum in range(self.slice_times_matrix.shape[1]):
        # Both in seconds
        offsets = self.tr_onsets + self.slice_times_matrix[:, slicenum]
        logger.info("Computing regressors for slice %d", slicenum)
        tr_cardiac_phase = np.array(
            [self.heartbeat_phase(t) for t in offsets])
        tr_indices = np.array([
            np.argmin(np.abs(t - self.resp_times)) for t in offsets
        ]).astype(int)
        tr_resp_phase = resp_phase[tr_indices]

        resp_regressors = fourier_expand(tr_resp_phase,
                                         self.respiration_expansion_order)
        columns = []
        columns += [("resp_ricor_cos%d" % (n + 1),
                     "resp_ricor_sin%d" % (n + 1))
                    for n in range(self.respiration_expansion_order)]
        cardiac_regressors = fourier_expand(tr_cardiac_phase,
                                            self.cardiac_expansion_order)
        columns += [("cardiac_ricor_cos%d" % (n + 1),
                     "cardiac_ricor_sin%d" % (n + 1))
                    for n in range(self.cardiac_expansion_order)]
        mult_plus = fourier_expand(tr_cardiac_phase + tr_resp_phase,
                                   self.interaction_expansion_order)
        columns += [("ix_plus_ricor_cos%d" % (n + 1),
                     "ix_plus_ricor_sin%d" % (n + 1))
                    for n in range(self.interaction_expansion_order)]
        mult_minus = fourier_expand(tr_cardiac_phase - tr_resp_phase,
                                    self.interaction_expansion_order)
        columns += [("ix_minus_ricor_cos%d" % (n + 1),
                     "ix_minus_ricor_sin%d" % (n + 1))
                    for n in range(self.interaction_expansion_order)]

        columns = detrend_columns + [
            item for sublist in columns for item in sublist
        ]
        data = np.row_stack([
            detrend_model,
            resp_regressors,
            cardiac_regressors,
            mult_plus,
            mult_minus,
        ]).T
        regressors.append(pd.DataFrame(data=data, columns=columns))
    return regressors
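
# `fourier_expand` comes from the package. Judging by the column names built
# above, it is assumed to return a (2 * order, n_timepoints) array holding
# cos(n * phase) and sin(n * phase) rows for n = 1..order. The sketch below is
# hypothetical; the package's version may order or scale terms differently.
def _fourier_expand_sketch(phase, order):
    """Hypothetical Fourier basis expansion of a phase signal."""
    phase = np.asarray(phase, dtype=float)
    rows = []
    for n in range(1, order + 1):
        rows.append(np.cos(n * phase))
        rows.append(np.sin(n * phase))
    return np.vstack(rows)
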
def detect(self):
    """
    Implementation of the Pan-Tompkins QRS detector.
    """
    logger.info("Beginning QRS Detection")
    t0 = time.time()

    # The original paper used a different method for finding peaks
    smoothdiff = normalize(np.ediff1d(self.qrs_power_signal, to_begin=0))
    peaks = find_peaks(smoothdiff)
    peak_amps = self.qrs_power_signal[peaks]

    # Part 2: getting rid of useless peaks
    # ====================================
    # There are lots of small, irrelevant peaks that need to be discarded.

    # 2a) remove peaks occurring in censored intervals
    censor_peak_mask = censor_peak_times(
        # TODO: switch to physiodata.censored_intervals
        self.ecg_ts.censored_regions,
        self.ecg_time[peaks])
    n_removed_peaks = peaks.shape[0] - censor_peak_mask.sum()
    logger.info("%d/%d potential peaks outside %d censored intervals",
                n_removed_peaks, peaks.shape[0],
                len(self.ecg_ts.censored_regions))
    peaks = peaks[censor_peak_mask]
    peak_amps = peak_amps[censor_peak_mask]

    # 2b) if a second signal is used, make sure the ECG peaks are
    #     near aux signal peaks
    if self.use_secondary_heartbeat:
        self.aux_windows = self.get_aux_windows()
        aux_mask = times_contained_in(peaks, self.aux_windows)
        logger.info("Using secondary signal")
        logger.info("%d aux peaks detected", self.aux_windows.shape[0])
        logger.info("%d/%d peaks contained in second signal window",
                    aux_mask.sum(), peaks.shape[0])
        peaks = peaks[aux_mask]
        peak_amps = peak_amps[aux_mask]

    # 2c) use Otsu's method to find a cutoff value
    peak_amp_thr = threshold_otsu(peak_amps) + self.pt_adjust
    otsu_mask = peak_amps > peak_amp_thr
    logger.info("otsu threshold: %.7f", peak_amp_thr)
    logger.info("%d/%d peaks survive", otsu_mask.sum(), peaks.shape[0])
    peaks = peaks[otsu_mask]
    peak_amps = peak_amps[otsu_mask]

    # 3) Make sure there is only one peak in each secondary window.
    #    This is accomplished by estimating the distribution of peak
    #    times relative to aux window starts.
    if self.use_secondary_heartbeat:
        npeaks = peaks.shape[0]
        # obtain a distribution of times and amplitudes
        rel_peak_times = np.zeros_like(peaks)
        window_subsets = []
        for start, end in self.aux_windows:
            mask = np.flatnonzero((peaks >= start) & (peaks <= end))
            if len(mask) == 0:
                continue
            window_subsets.append(mask)
            rel_peak_times[mask] = peaks[mask] - start
        # how common are peak times relative to window starts?
        densities, bins = np.histogram(
            rel_peak_times,
            range=(0, self.secondary_heartbeat_pre_msec),
            bins=self.secondary_heartbeat_n_likelihood_bins,
            density=True)
        likelihoods = densities[np.clip(
            np.digitize(rel_peak_times, bins) - 1, 0,
            self.secondary_heartbeat_n_likelihood_bins - 1)]
        _peaks = []
        # Pull out the maximal peak
        for subset in window_subsets:
            # If there's only a single peak contained, no need for math
            if len(subset) == 1:
                _peaks.append(peaks[subset[0]])
                continue
            _peaks.append(peaks[subset[np.argmax(likelihoods[subset])]])
        peaks = np.array(_peaks)
        logger.info("Only 1 peak per aux window allowed:"
                    " %d/%d peaks remaining", peaks.shape[0], npeaks)

    # Check that no two peaks are too close:
    peak_diffs = np.ediff1d(peaks, to_begin=500)
    peaks = peaks[peak_diffs > 200]  # Cutoff is 300BPM

    # Stack the peaks and see if the original data has a higher value
    raw_stack = peak_stack(peaks, self.ecg_signal,
                           pre_msec=self.peak_window,
                           post_msec=self.peak_window,
                           sampling_rate=self.ecg_ts.sampling_rate)
    adj_factors = np.argmax(raw_stack, axis=1) - self.peak_window
    peaks = peaks + adj_factors

    self.peak_indices = peaks
    self.peak_values = self.ecg_signal[peaks]
    self.peak_times = self.ecg_time[peaks]
    self.thr_vals = np.array([peak_amp_thr] * 2)
    t1 = time.time()

    # update the scatterplot if we're interactive
    if self.plot_data is not None:
        self.plot_data.set_data("peak_times", self.peak_times)
        self.plot_data.set_data("peak_values", self.peak_values)
        self.plot_data.set_data("qrs_power", self.qrs_power_signal)
        self.plot_data.set_data("aux_signal", self.aux_signal)
        self.plot_data.set_data("thr_vals", self.thr_vals)
        self.plot_data.set_data(
            "thr_times", np.array([self.ecg_time[0], self.ecg_time[-1]]))
        self.update_aux_window_graphics()
        self.plot.request_redraw()
        self.image_plot_data.set_data("imagedata", self.ecg_matrix)
        self.image_plot.request_redraw()
    else:
        logger.warning("plot_data is None; skipping interactive plot update")

    logger.info("found %d QRS complexes in %.3f seconds",
                len(self.peak_indices), t1 - t0)
    self.dirty = False
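
# A minimal usage sketch. It assumes a `physiodata` object has already been
# loaded elsewhere; `load_physiodata` is a hypothetical placeholder for
# whatever loader the surrounding package actually provides. Kept as a
# comment so the module does nothing extra at import time:
#
#     physiodata = load_physiodata("subject01.mea.mat")  # hypothetical loader
#     detector = PanTomkinsDetector(physiodata=physiodata)
#     detector.detect()
#     print("Found %d R peaks" % len(detector.peak_indices))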