def get_all_features_hrva(data_sample, sample_rate=100, rpeak_method=0):
    """Compute HRV features (time domain, frequency domain, geometrical and
    CSI/CVI) from a raw signal using the hrv-analysis package.

    :param data_sample: raw signal (array-like)
    :param sample_rate: sampling rate of the signal in Hz (default 100)
    :param rpeak_method: peak detection method; 1-4 use PeakDetector.ppg_detector,
        any other value falls back to HeartPy's rolling-mean detector
    :return: tuple of (time_domain_features, frequency_domain_features,
        geometrical_features, csi_cvi_features) dictionaries
    """
    if rpeak_method in [1, 2, 3, 4]:
        detector = PeakDetector()
        peak_list = detector.ppg_detector(data_sample, rpeak_method)[0]
    else:
        rol_mean = rolling_mean(data_sample, windowsize=0.75,
                                sample_rate=float(sample_rate))
        peaks_wd = detect_peaks(data_sample, rol_mean, ma_perc=20,
                                sample_rate=float(sample_rate))
        peak_list = peaks_wd["peaklist"]

    # Convert peak indices to RR intervals in milliseconds
    rr_list = np.diff(peak_list) * (1000 / sample_rate)

    nn_list = get_nn_intervals(rr_list)
    nn_list_non_na = np.copy(nn_list)
    nn_list_non_na[np.where(np.isnan(nn_list_non_na))[0]] = -1

    time_domain_features = get_time_domain_features(rr_list)
    frequency_domain_features = get_frequency_domain_features(rr_list)
    geometrical_features = get_geometrical_features(rr_list)
    csi_cvi_features = get_csi_cvi_features(rr_list)

    return time_domain_features, frequency_domain_features, \
        geometrical_features, csi_cvi_features
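# Illustrative call of get_all_features_hrva() above -- a minimal sketch, not
# part of the library. The helper name _example_hrva_features is hypothetical,
# a synthetic sine stands in for a real waveform, and hrv-analysis plus
# heartpy are assumed to be installed. rpeak_method=0 exercises the HeartPy
# rolling-mean fallback branch.
def _example_hrva_features():
    import numpy as np
    fs = 100
    t = np.arange(0, 60, 1 / fs)
    fake_signal = np.sin(2 * np.pi * 1.0 * t) + 0.05 * np.random.randn(len(t))
    td, fd, geo, csi_cvi = get_all_features_hrva(fake_signal, sample_rate=fs,
                                                 rpeak_method=0)
    # Each element is a dictionary, e.g. td["rmssd"], fd["lf"].
    return td, fd, geo, csi_cvi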
def msq(x):
    """MSQ SQI: agreement between two peak detectors on the signal x
    (see ``msq_sqi``)."""
    # Library
    from vital_sqi.common.rpeak_detection import PeakDetector

    # Detection of peaks with the primary detector (type 7)
    detector = PeakDetector()
    peak_list, trough_list = detector.ppg_detector(x, 7)

    # Agreement with the secondary detector (type 6)
    return sq.standard_sqi.msq_sqi(x, peaks_1=peak_list, peak_detect2=6)
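# Illustrative usage of msq() above -- a sketch only, not a library example.
# The helper name _example_msq is hypothetical; it assumes numpy, vital_sqi
# and the module-level `sq` import are available, and uses a synthetic 1 Hz
# sine as a stand-in for a 30 s PPG segment sampled at 100 Hz.
def _example_msq():
    import numpy as np
    fs = 100
    t = np.arange(0, 30, 1 / fs)
    fake_ppg = np.sin(2 * np.pi * 1.0 * t) + 0.05 * np.random.randn(len(t))
    # Returns a value in [0, 1]; 1.0 means both peak detectors fully agree.
    return msq(fake_ppg)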
def get_peak_error_features(data_sample, sample_rate=100, rpeak_detector=0,
                            low_rri=300, high_rri=2000):
    """Compute peak-error SQIs: the ratio of RR-interval outliers and the
    ratio of ectopic beats flagged by each HRV rule (Malik, Karlsson, Kamath,
    Acar). Returns a dict of NaN values if HeartPy cannot process the signal.
    """
    rules = ["malik", "karlsson", "kamath", "acar"]
    try:
        wd, m = hp.process(data_sample, sample_rate, calc_freq=True)
    except Exception:
        try:
            wd, m = hp.process(data_sample, sample_rate)
        except Exception:
            error_dict = {rule + "_error": np.nan for rule in rules}
            error_dict["outlier_error"] = np.nan
            return error_dict

    if rpeak_detector in [1, 2, 3, 4]:
        detector = PeakDetector(wave_type='ecg')
        peak_list = detector.ppg_detector(data_sample, rpeak_detector,
                                          preprocess=False)[0]
        wd["peaklist"] = peak_list
        wd = calc_rr(peak_list, sample_rate, working_data=wd)
        wd = check_peaks(wd['RR_list'], wd['peaklist'], wd['ybeat'],
                         reject_segmentwise=False, working_data=wd)
        wd = clean_rr_intervals(working_data=wd)

    rr_intervals = wd["RR_list"]
    rr_intervals_cleaned = remove_outliers(rr_intervals, low_rri=low_rri,
                                           high_rri=high_rri)
    number_outliers = len(np.where(np.isnan(rr_intervals_cleaned))[0])
    outlier_ratio = number_outliers / \
        (len(rr_intervals_cleaned) - number_outliers)

    error_sqi = {}
    error_sqi['outlier_error'] = outlier_ratio

    interpolated_rr_intervals = interpolate_nan_values(rr_intervals_cleaned)

    for rule in rules:
        nn_intervals = remove_ectopic_beats(interpolated_rr_intervals,
                                            method=rule)
        number_ectopics = len(np.where(np.isnan(nn_intervals))[0])
        ectopic_ratio = number_ectopics / \
            (len(nn_intervals) - number_ectopics)
        error_sqi[rule + "_error"] = ectopic_ratio

    return error_sqi
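# Hypothetical usage sketch for get_peak_error_features() above (the helper
# name _example_peak_error is not part of the library). heartpy, hrv-analysis
# and vital_sqi are assumed to be installed; the synthetic sine merely
# demonstrates the call and the shape of the returned dictionary.
def _example_peak_error():
    import numpy as np
    fs = 100
    t = np.arange(0, 60, 1 / fs)
    fake_ecg = np.sin(2 * np.pi * 1.2 * t) + 0.05 * np.random.randn(len(t))
    errors = get_peak_error_features(fake_ecg, sample_rate=fs)
    # Keys: 'malik_error', 'karlsson_error', 'kamath_error', 'acar_error',
    # 'outlier_error'; values are ratios (or NaN if HeartPy fails).
    return errors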
def get_split_rr_index(segment_seconds, sequence):
    """Return the indices of the splitting points.

    :param segment_seconds: the length of each cut split (in seconds)
    :param sequence: the signal to be split
    :return: list of split indices
    """
    detector = PeakDetector()
    indices = [0]
    for i in range(0, int(np.ceil(len(sequence) / segment_seconds))):
        # Look slightly past the nominal boundary so the cut can snap to the
        # last detected trough inside the chunk.
        chunk = sequence[int(segment_seconds * i):
                         int(segment_seconds * (i + 1) + 60)]
        peak_list, trough_list = detector.ppg_detector(chunk)
        if len(trough_list) > 0:
            indices.append(int(trough_list[-1] + segment_seconds * i))
        else:
            indices.append(int(segment_seconds * (i + 1)))
    return indices
def split_segment(s, split_type=0, duration=30.0, overlaping=1,
                  sampling_rate=100, peak_detector=7, wave_type='ppg'):
    """Split a signal into segments, either by time or by beat.
    Input is a signal with timestamps.

    Parameters
    ----------
    s : array-like
        represents the signal.
    split_type : int
        0: split by time
        1: split by beat
    duration : float
        the duration of each segment if split by time, in seconds,
        default = 30 (seconds); the number of complexes/beats in each
        segment if split by beat, default = 30 (beats/segment)
    overlaping : float
        the overlap between consecutive segments, in seconds (split by time)
        or beats (split by beat)
    sampling_rate : int
        device sampling rate
    peak_detector : int
        the type of peak detection if the segment is split by beat.
    wave_type : str
        type of signal, either 'ppg' or 'ecg'

    Returns
    -------
    segments, milestones :
        the list of segments and a DataFrame of the cut indices.

    >>> from vital_sqi.common.utils import generate_timestamp
    >>> s = np.arange(100000)
    >>> timestamps = generate_timestamp(None, 100, len(s))
    >>> df = pd.DataFrame(np.hstack((np.array(timestamps).reshape(-1, 1),
    ...                              np.array(s).reshape(-1, 1))))
    >>> split_segment(df, overlaping=3)
    """
    assert check_signal_format(s) is True
    if split_type == 0:
        chunk_size = int(duration * sampling_rate)
        chunk_step = int(overlaping * sampling_rate)
        chunk_indices = [
            [int(i), int(i + chunk_size)]
            for i in range(0, len(s), chunk_size - chunk_step)
        ]
    else:
        if wave_type == 'ppg':
            detector = PeakDetector(wave_type='ppg')
            peak_list, trough_list = detector.ppg_detector(
                s, detector_type=peak_detector)
        else:
            detector = PeakDetector(wave_type='ecg')
            peak_list, trough_list = detector.ecg_detector(
                s, detector_type=peak_detector)
        # duration counts beats here; keep the index in range of peak_list
        chunk_indices = [
            [peak_list[i], peak_list[i + int(duration)]]
            for i in range(0, len(peak_list) - int(duration),
                           int(duration - overlaping))
        ]
        # Start the first segment at the beginning of the signal
        chunk_indices[0][0] = 0
    milestones = pd.DataFrame(chunk_indices)
    segments = cut_segment(s, milestones)
    return segments, milestones
def save_segment_image(segment, saved_filename, save_img_folder,
                       display_trough_peak):
    """Save a plot of the segment waveform as a PNG image.

    :param segment: the segment waveform (array-like)
    :param saved_filename: file name of the saved image (without extension)
    :param save_img_folder: folder in which the image is saved
    :param display_trough_peak: if True, mark detected peaks and troughs
    :return:
    """
    fig = go.Figure()
    fig.add_traces(go.Scatter(x=np.arange(len(segment)), y=segment,
                              mode="lines"))
    if display_trough_peak:
        wave = PeakDetector()
        systolic_peaks_idx, trough_idx = \
            wave.detect_peak_trough_count_orig(segment)
        fig.add_traces(go.Scatter(x=systolic_peaks_idx,
                                  y=segment[systolic_peaks_idx],
                                  mode="markers"))
        fig.add_traces(go.Scatter(x=trough_idx, y=segment[trough_idx],
                                  mode="markers"))
    fig.update_layout(
        autosize=True,
    )
    fig.write_image(os.path.join(save_img_folder, saved_filename + '.png'))
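# Usage sketch for save_segment_image() above (the helper name is
# hypothetical, not part of the library): writes a PNG of a synthetic segment
# into a temporary folder. Note that plotly's static image export additionally
# requires the kaleido (or orca) package to be installed.
def _example_save_segment_image():
    import tempfile
    import numpy as np
    fake_segment = np.sin(2 * np.pi * 1.0 * np.arange(0, 10, 0.01))
    out_dir = tempfile.mkdtemp()
    save_segment_image(fake_segment, "demo_segment", out_dir,
                       display_trough_peak=False)
    return out_dir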
def msq_sqi(s, peak_detector_1=7, peak_detector_2=6, wave_type='ppg'):
    """MSQ SQI as defined in Elgendi et al., "Optimal Signal Quality Index for
    Photoplethysmogram Signals", with a modification of the second algorithm
    used: instead of Bing's, a SciPy built-in implementation is used. The SQI
    tracks the agreement between two peak detectors to evaluate the quality
    of the signal.

    Parameters
    ----------
    s : sequence
        A signal with peaks.
    peak_detector_1 : int
        Type of the primary peak detection algorithm, default = Billauer
    peak_detector_2 : int
        Type of the secondary peak detection algorithm, default = SciPy
    wave_type : str
        Type of signal, either 'ppg' or 'ecg'

    Returns
    -------
    msq_sqi : number
        MSQ SQI value for the given signal
    """
    if wave_type == 'ppg':
        detector = PeakDetector(wave_type='ppg')
        peaks_1, trough_list = detector.ppg_detector(
            s, detector_type=peak_detector_1)
        peaks_2 = detector.ppg_detector(s, detector_type=peak_detector_2,
                                        preprocess=False)[0]
    else:
        detector = PeakDetector(wave_type='ecg')
        peaks_1, trough_list = detector.ecg_detector(
            s, detector_type=peak_detector_1)
        peaks_2 = detector.ecg_detector(s, detector_type=peak_detector_2,
                                        preprocess=False)[0]
    if len(peaks_1) == 0 or len(peaks_2) == 0:
        return 0.0
    peak1_dom = len(np.intersect1d(peaks_1, peaks_2)) / len(peaks_1)
    peak2_dom = len(np.intersect1d(peaks_2, peaks_1)) / len(peaks_2)
    return min(peak1_dom, peak2_dom)
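# Sketch of how msq_sqi() above can be called on a raw PPG segment (an
# illustration with a hypothetical helper name, not a library test). Detector
# types 7 and 6 follow the defaults documented above.
def _example_msq_sqi():
    import numpy as np
    fs = 100
    t = np.arange(0, 30, 1 / fs)
    fake_ppg = np.sin(2 * np.pi * 1.0 * t) + 0.05 * np.random.randn(len(t))
    score = msq_sqi(fake_ppg, peak_detector_1=7, peak_detector_2=6,
                    wave_type='ppg')
    # Score is the minimum of the two mutual agreement ratios, in [0, 1].
    return score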
def get_all_features_hrva(s, sample_rate=100, rpeak_method=0,
                          wave_type='ecg'):
    """Compute HRV features (time domain, frequency domain, geometrical and
    CSI/CVI) from a raw signal using the hrv-analysis package.

    Parameters
    ----------
    s :
        Raw signal
    sample_rate :
        Sampling rate in Hz (Default value = 100)
    rpeak_method :
        Type of peak detector passed to PeakDetector (Default value = 0)
    wave_type :
        Type of signal, either 'ppg' or 'ecg' (Default value = 'ecg')

    Returns
    -------
    Tuple of (time_domain_features, frequency_domain_features,
    geometrical_features, csi_cvi_features) dictionaries.
    """
    if wave_type == 'ppg':
        detector = PeakDetector(wave_type='ppg')
        peak_list, trough_list = detector.ppg_detector(
            s, detector_type=rpeak_method)
    else:
        detector = PeakDetector(wave_type='ecg')
        peak_list, trough_list = detector.ecg_detector(
            s, detector_type=rpeak_method)

    # Convert peak indices to RR intervals in milliseconds
    rr_list = np.diff(peak_list) * (1000 / sample_rate)

    nn_list = get_nn_intervals(rr_list)
    nn_list_non_na = np.copy(nn_list)
    nn_list_non_na[np.where(np.isnan(nn_list_non_na))[0]] = -1

    time_domain_features = get_time_domain_features(rr_list)
    frequency_domain_features = get_frequency_domain_features(rr_list)
    geometrical_features = get_geometrical_features(rr_list)
    csi_cvi_features = get_csi_cvi_features(rr_list)

    return time_domain_features, frequency_domain_features, \
        geometrical_features, csi_cvi_features
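# Illustrative call of the refactored get_all_features_hrva() above -- a
# sketch only, with a hypothetical helper name and a synthetic PPG-like sine.
# hrv-analysis and vital_sqi are assumed to be installed; wave_type='ppg' with
# detector type 7 exercises the PPG branch instead of an ECG detector.
def _example_hrva_features_ppg():
    import numpy as np
    fs = 100
    t = np.arange(0, 60, 1 / fs)
    fake_ppg = np.sin(2 * np.pi * 1.0 * t) + 0.05 * np.random.randn(len(t))
    td, fd, geo, csi_cvi = get_all_features_hrva(fake_ppg, sample_rate=fs,
                                                 rpeak_method=7,
                                                 wave_type='ppg')
    return td, fd, geo, csi_cvi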
def get_all_features_heartpy(data_sample, sample_rate=100, rpeak_detector=0):
    """Compute HeartPy time-domain and frequency-domain features.

    Parameters
    ----------
    data_sample :
        Raw signal
    sample_rate :
        Sampling rate in Hz (Default value = 100)
    rpeak_detector :
        Type of peak detector; 1-4 replace HeartPy's peak list with
        PeakDetector output (Default value = 0)

    Returns
    -------
    Tuple of (time_domain_features, frequency_domain_features) dictionaries;
    values are NaN if HeartPy cannot process the signal.
    """
    # time domain features
    td_features = [
        "bpm", "ibi", "sdnn", "sdsd", "rmssd", "pnn20", "pnn50", "hr_mad",
        "sd1", "sd2", "s", "sd1/sd2", "breathingrate"
    ]
    # frequency domain features
    fd_features = ["lf", "hf", "lf/hf"]
    try:
        wd, m = hp.process(data_sample, sample_rate, calc_freq=True)
    except Exception:
        try:
            wd, m = hp.process(data_sample, sample_rate)
        except Exception:
            time_domain_features = {k: np.nan for k in td_features}
            frequency_domain_features = {k: np.nan for k in fd_features}
            return time_domain_features, frequency_domain_features

    if rpeak_detector in [1, 2, 3, 4]:
        detector = PeakDetector(wave_type='ecg')
        peak_list = \
            detector.ppg_detector(data_sample, rpeak_detector,
                                  preprocess=False)[0]
        wd["peaklist"] = peak_list
        wd = calc_rr(peak_list, sample_rate, working_data=wd)
        wd = check_peaks(wd['RR_list'], wd['peaklist'], wd['ybeat'],
                         reject_segmentwise=False, working_data=wd)
        wd = clean_rr_intervals(working_data=wd)

    rr_diff = wd['RR_list']
    rr_sqdiff = np.power(rr_diff, 2)
    wd, m = calc_ts_measures(wd['RR_list'], rr_diff, rr_sqdiff,
                             working_data=wd)
    m = calc_poincare(wd['RR_list'], wd['RR_masklist'], measures=m,
                      working_data=wd)
    try:
        measures, working_data = calc_breathing(wd['RR_list_cor'],
                                                data_sample, sample_rate,
                                                measures=m, working_data=wd)
    except Exception:
        measures, working_data = m, wd
        measures['breathingrate'] = np.nan

    wd, m = calc_fd_measures(measures=measures, working_data=working_data)

    time_domain_features = {k: m[k] for k in td_features}
    frequency_domain_features = {}
    for k in fd_features:
        if k in m.keys():
            frequency_domain_features[k] = m[k]
        else:
            frequency_domain_features[k] = np.nan

    return time_domain_features, frequency_domain_features
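# Usage sketch for get_all_features_heartpy() above (hypothetical helper name,
# not a library test): HeartPy processes a synthetic signal and the two
# returned dictionaries are keyed by the td_features / fd_features lists
# defined inside the function.
def _example_heartpy_features():
    import numpy as np
    fs = 100
    t = np.arange(0, 60, 1 / fs)
    fake_signal = np.sin(2 * np.pi * 1.2 * t) + 0.05 * np.random.randn(len(t))
    td, fd = get_all_features_heartpy(fake_signal, sample_rate=fs,
                                      rpeak_detector=0)
    # e.g. td["bpm"], td["rmssd"], fd["lf/hf"]; missing values are NaN.
    return td, fd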
def test_on_detect_peak_trough_count_orig(self):
    detector = PeakDetector()
    pass
        lp_cutoff_order=lp_filt_params, trim_amount=trim_amount,
        filter_type=filter_type, sampling_rate=sampling_rate))
print(ppg_data.signals.shape)

s = np.arange(0, 1000, 1)
fig, ax = plt.subplots()
ax.plot(s, ppg_data.signals[0][8000:9000])
plt.show()

ppg_data.update_segment_indices(
    sqi_sg.generate_segment_idx(segment_length=segment_length,
                                sampling_rate=sampling_rate,
                                signal_array=ppg_data.signals))
print(ppg_data.segments.shape)
print(ppg_data.segments)

detector = PeakDetector()
peak_list, trough_list = detector.ppg_detector(
    ppg_data.signals[0][ppg_data.segments[0][0]:ppg_data.segments[0][1]], 7)

# Plot results of peak detection
s = np.arange(0, 3000, 1)
fig, ax = plt.subplots()
ax.plot(s, ppg_data.signals[0][ppg_data.segments[0][0]:ppg_data.segments[0][1]])
if len(peak_list) != 0:
    ax.scatter(peak_list, ppg_data.signals[0][peak_list],
               color="r", marker="v")
if len(trough_list) != 0:
    ax.scatter(trough_list, ppg_data.signals[0][trough_list],
               color="b", marker="v")
plt.show()

# Plot a single period
fig, ax = plt.subplots()
ax.plot(ppg_data.signals[0][trough_list[0]:trough_list[1]])
def ectopic_sqi(
        data_sample,
        rule_index=0,
        sample_rate=100,
        rpeak_detector=0,
        wave_type='ppg',
        low_rri=300,
        high_rri=2000,
):
    """Evaluate invalid peaks (those exceeding the normal range) based on the
    HRV rules Malik, Karlsson, Kamath and Acar, and output the ratio of
    invalid intervals.

    Parameters
    ----------
    data_sample :
        Raw signal
    rule_index :
        0: outlier ratio only (default)
        1: Malik
        2: Karlsson
        3: Kamath
        4: Acar
    sample_rate :
        (Default value = 100)
    rpeak_detector :
        Type of peak detector passed to PeakDetector (Default value = 0)
    wave_type :
        Type of signal, either 'ppg' or 'ecg' (Default value = 'ppg')
    low_rri :
        Lower bound of a valid RR interval in ms (Default value = 300)
    high_rri :
        Upper bound of a valid RR interval in ms (Default value = 2000)

    Returns
    -------
    The outlier ratio (rule_index = 0) or the ectopic-beat ratio for the
    selected rule; a dict of NaN errors if HeartPy cannot process the signal.
    """
    rules = ["malik", "karlsson", "kamath", "acar"]
    try:
        wd, m = hp.process(data_sample, sample_rate, calc_freq=True)
    except Exception:
        try:
            wd, m = hp.process(data_sample, sample_rate)
        except Exception:
            error_dict = {rule + "_error": np.nan for rule in rules}
            error_dict["outlier_error"] = np.nan
            return error_dict

    if wave_type == 'ecg':
        detector = PeakDetector(wave_type='ecg')
        peak_list = detector.ecg_detector(data_sample, rpeak_detector)[0]
    else:
        detector = PeakDetector(wave_type='ppg')
        peak_list = detector.ppg_detector(data_sample, rpeak_detector,
                                          preprocess=False)[0]
    wd["peaklist"] = peak_list
    wd = calc_rr(peak_list, sample_rate, working_data=wd)
    wd = check_peaks(wd['RR_list'], wd['peaklist'], wd['ybeat'],
                     reject_segmentwise=False, working_data=wd)
    wd = clean_rr_intervals(working_data=wd)

    rr_intervals = wd["RR_list"]
    rr_intervals_cleaned = remove_outliers(rr_intervals, low_rri=low_rri,
                                           high_rri=high_rri)
    number_outliers = len(np.where(np.isnan(rr_intervals_cleaned))[0])
    outlier_ratio = number_outliers / \
        (len(rr_intervals_cleaned) - number_outliers)
    if rule_index == 0:
        return outlier_ratio

    interpolated_rr_intervals = interpolate_nan_values(rr_intervals_cleaned)
    # rule_index 1..4 maps onto the rules list above
    rule = rules[rule_index - 1]
    nn_intervals = remove_ectopic_beats(interpolated_rr_intervals,
                                        method=rule)
    number_ectopics = len(np.where(np.isnan(nn_intervals))[0])
    ectopic_ratio = number_ectopics / (len(nn_intervals) - number_ectopics)
    return ectopic_ratio
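# Illustrative call of ectopic_sqi() above -- a sketch with a hypothetical
# helper name. With the default rule_index=0 the function returns the outlier
# ratio; rule_index=1..4 selects one of the Malik/Karlsson/Kamath/Acar rules.
# heartpy and hrv-analysis are assumed to be installed.
def _example_ectopic_sqi():
    import numpy as np
    fs = 100
    t = np.arange(0, 60, 1 / fs)
    fake_ppg = np.sin(2 * np.pi * 1.0 * t) + 0.05 * np.random.randn(len(t))
    outlier_ratio = ectopic_sqi(fake_ppg, rule_index=0, sample_rate=fs,
                                rpeak_detector=7, wave_type='ppg')
    malik_ratio = ectopic_sqi(fake_ppg, rule_index=1, sample_rate=fs,
                              rpeak_detector=7, wave_type='ppg')
    return outlier_ratio, malik_ratio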
def test_on_init(self):
    detector = PeakDetector()
    pass
def segment_PPG_SQI_extraction(signal_segment, sampling_rate=100,
                               primary_peakdet=7, secondary_peakdet=6,
                               hp_cutoff_order=(1, 1),
                               lp_cutoff_order=(20, 4),
                               template_type=1):
    """Extract all package-available SQIs from a single segment of PPG
    waveform. Return a series with all SQIs and the cut points of the segment.

    Parameters
    ----------
    signal_segment : array-like
        A segment of raw signal. The length is user defined in the
        compute_SQI() function.
    sampling_rate : int
        Sampling rate of the signal
    primary_peakdet : int
        Selects one of the peak detectors from the PeakDetector class.
        The primary one is used to segment the waveform.
    secondary_peakdet : int
        Selects one of the peak detectors from the PeakDetector class.
        The secondary peak detector is used to compute the MSQ SQI.
    hp_cutoff_order : tuple (int, int)
        High-pass filter parameters: cutoff frequency and order
    lp_cutoff_order : tuple (int, int)
        Low-pass filter parameters: cutoff frequency and order
    template_type : int
        Selects which template the DTW SQI should use

    Returns
    -------
    Pandas Series object with all SQIs for the given segment
    """
    raw_segment = signal_segment[signal_segment.columns[1]].to_numpy()
    # Prepare final dictionary that will be converted to a Series at the end
    SQI_dict = {
        'first': signal_segment['idx'].iloc[0],
        'last': signal_segment['idx'].iloc[-1]
    }
    # Prepare filter and filter signal
    filt = BandpassFilter(band_type='butter', fs=sampling_rate)
    filtered_segment = filt.signal_highpass_filter(raw_segment,
                                                   cutoff=hp_cutoff_order[0],
                                                   order=hp_cutoff_order[1])
    filtered_segment = filt.signal_lowpass_filter(filtered_segment,
                                                  cutoff=lp_cutoff_order[0],
                                                  order=lp_cutoff_order[1])
    # Prepare primary peak detector and perform peak detection
    detector = PeakDetector()
    peak_list, trough_list = detector.ppg_detector(filtered_segment,
                                                   primary_peakdet)
    # Helpful lists for iteration
    variations_stats = ['', '_mean', '_median', '_std']
    variations_acf = [
        '_peak1', '_peak2', '_peak3', '_value1', '_value2', '_value3'
    ]
    stats_functions = [('skewness', sq.standard_sqi.skewness_sqi),
                       ('kurtosis', sq.standard_sqi.kurtosis_sqi),
                       ('entropy', sq.standard_sqi.entropy_sqi)]
    # Raw signal SQI computation
    SQI_dict['snr'] = np.mean(sq.standard_sqi.signal_to_noise_sqi(raw_segment))
    SQI_dict['perfusion'] = sq.standard_sqi.perfusion_sqi(y=filtered_segment,
                                                          x=raw_segment)
    SQI_dict['mean_cross'] = sq.standard_sqi.mean_crossing_rate_sqi(
        raw_segment)
    # Filtered signal SQI computation
    SQI_dict['zero_cross'] = sq.standard_sqi.zero_crossings_rate_sqi(
        filtered_segment)
    SQI_dict['msq'] = sq.standard_sqi.msq_sqi(y=filtered_segment,
                                              peaks_1=peak_list,
                                              peak_detect2=secondary_peakdet)
    correlogram_list = sq.rpeaks_sqi.correlogram_sqi(filtered_segment)
    for idx, variations in enumerate(variations_acf):
        SQI_dict['correlogram' + variations] = correlogram_list[idx]
    # Per beat SQI calculation
    dtw_list = sq.standard_sqi.per_beat_sqi(sqi_func=sq.dtw_sqi,
                                            troughs=trough_list,
                                            signal=filtered_segment,
                                            taper=True,
                                            template_type=template_type)
    SQI_dict['dtw_mean'] = np.mean(dtw_list)
    SQI_dict['dtw_std'] = np.std(dtw_list)
    for function in stats_functions:
        SQI_dict[function[0] + variations_stats[0]] = \
            function[1](filtered_segment)
        statSQI_list = sq.standard_sqi.per_beat_sqi(sqi_func=function[1],
                                                    troughs=trough_list,
                                                    signal=filtered_segment,
                                                    taper=True)
        SQI_dict[function[0] + variations_stats[1]] = np.mean(statSQI_list)
        SQI_dict[function[0] + variations_stats[2]] = np.median(statSQI_list)
        SQI_dict[function[0] + variations_stats[3]] = np.std(statSQI_list)
    return pd.Series(SQI_dict)
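# Hypothetical end-to-end sketch for segment_PPG_SQI_extraction() above. The
# two-column layout (an 'idx' column plus the raw PPG column) mirrors what the
# function indexes internally; the column name 'PLETH' and the helper name
# are assumptions, and vital_sqi plus its dependencies must be installed.
def _example_segment_sqi():
    import numpy as np
    import pandas as pd
    fs = 100
    t = np.arange(0, 30, 1 / fs)
    fake_ppg = np.sin(2 * np.pi * 1.0 * t) + 0.05 * np.random.randn(len(t))
    segment = pd.DataFrame({'idx': np.arange(len(fake_ppg)),
                            'PLETH': fake_ppg})
    sqi_row = segment_PPG_SQI_extraction(segment, sampling_rate=fs)
    # sqi_row is a pandas Series with one entry per SQI (snr, perfusion, msq,
    # dtw_mean, correlogram_*, ...).
    return sqi_row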
def test_on_detect_peak_trough_billauer(self):
    detector = PeakDetector()
    pass

def test_ecg_detector(self):
    detector = PeakDetector()
    pass

def test_on_search_for_onset(self):
    detector = PeakDetector()
    pass

def test_on_get_moving_average(self):
    detector = PeakDetector()
    pass

def test_on_detect_peak_trough_slope_sum(self):
    detector = PeakDetector()
    pass

def test_on_ppg_detector(self):
    detector = PeakDetector()
    pass

def test_on_detect_peak_trough_clusterer(self):
    detector = PeakDetector()
    pass

def test_on_compute_feature(self):
    detector = PeakDetector()
    pass

def test_on_matched_filter_detector(self):
    detector = PeakDetector()
    pass