def test_find_peaks_empty(self):
    x = []
    hp, sp = processing.find_peaks(x)
    assert hp.shape == (0,)
    assert sp.shape == (0,)
def test_find_peaks(self):
    x = [0, 2, 1, 0, -10, -15, -15, -15, 9, 8, 0, 0, 1, 2, 10]
    hp, sp = processing.find_peaks(x)
    assert np.array_equal(hp, [1, 8])
    assert np.array_equal(sp, [6, 10])
def expand_ecg(datas, type=1, gain=1.1, bate=0.9) -> list:
    """Augment a 180-sample heartbeat (R peak assumed near index 75 by the
    position-based modes) by scaling and/or stretching parts of the signal.
    `type` selects the augmentation mode."""
    datas_expand = datas.copy()
    if type == 0:  # R-: attenuate a window around the detected R peak
        r_peak_index = detext_xqrs(datas)
        i_l = r_peak_index - 10
        if i_l < 0:  # clamp the window to valid indices
            i_l = 0
        i_r = r_peak_index + 10
        if i_r > 179:
            i_r = 179
        for i in range(i_l, i_r, 1):
            # for i in range(len(datas)):
            datas_expand[i] *= bate
        return datas_expand
    if type == 1:  # R+: amplify the whole beat
        # r_peak_index = detext_xqrs(datas)
        # for i in range(r_peak_index - 5, r_peak_index + 5, 1):
        for i in range(len(datas)):
            datas_expand[i] *= gain
        return datas_expand
    elif type == 2:  # R scale: stretch the region around the R peak by inserting interpolated samples
        expand_width_percent = 0.15
        expand_width_step = int(math.floor(1.0 / expand_width_percent))
        for i in range(75 + 20, 75 - 20, -expand_width_step):
            sub_datas = datas_expand[i - 1:i + 1]
            # Max
            # s = max(sub_datas)
            # Mean of the two neighbouring samples
            s = 0.
            for sub_d in sub_datas:
                s += sub_d
            s = s / len(sub_datas)
            datas_expand.insert(i, s)
        # Re-centre on the shifted R peak and crop back to a 180-sample window
        iC = int((20 + 25.0) / expand_width_step / 2.0) - 1
        # datas_expand.extend([0 for j in range(iC)])
        # r_peak_index = detext_xqrs(datas_expand)
        r_peak_index = 75 + iC
        # if r_peak_index < iC:
        #     r_peak_index = iC
        datas_expand = datas_expand[r_peak_index - 75:r_peak_index + 105]
        return datas_expand
    elif type == 3:  # amplify, then stretch
        datas_expand = expand_ecg(datas_expand, 1)
        datas_expand = expand_ecg(datas_expand, 2)
        return datas_expand
    elif type == 4:  # attenuate, then stretch
        datas_expand = expand_ecg(datas_expand, 0)
        datas_expand = expand_ecg(datas_expand, 2)
        return datas_expand
    elif type == 5:  # --: stronger attenuation around the R peak
        return expand_ecg(datas_expand, 0, bate=0.82)
    elif type == 6:  # ++: stronger amplification of the whole beat
        return expand_ecg(datas_expand, 1, gain=1.18)
    elif type == 7:  # -+ All peaks up/down: scale a window around every detected peak
        peaks, _ = wfp.find_peaks(datas_expand)
        peaks = peaks.tolist()
        # Keep only peaks more than 10 samples away from the last kept peak
        n_peak = []
        last_peak_value = 0
        for k in range(len(peaks) - 2):
            if abs(peaks[k] - last_peak_value) > 10:
                n_peak.append(peaks[k])
                last_peak_value = peaks[k]
        for p in n_peak:
            p_l = p - 5
            if p_l < 0:
                p_l = 0
            p_r = p + 5
            if p_r > 179:
                p_r = 179
            for i in range(p_l, p_r):
                datas_expand[i] *= gain
                # if datas_expand[i] > 0:
                #     datas_expand[i] *= gain
                # else:
                #     datas_expand[i] *= bate
        return datas_expand
    elif type == 8:  # ++-- All peaks, random up/down per peak (not good)
        peaks, _ = wfp.find_peaks(datas_expand)
        peaks = peaks.tolist()
        n_peak = []
        last_peak_value = 0
        for k in range(len(peaks) - 2):
            if abs(peaks[k] - last_peak_value) > 10:
                n_peak.append(peaks[k])
                last_peak_value = peaks[k]
        for p in n_peak:
            p_l = p - 5
            if p_l < 0:
                p_l = 0
            p_r = p + 5
            if p_r > 179:
                p_r = 179
            # Randomly amplify or attenuate the window around this peak
            r_i = np.random.randint(2)
            for i in range(p_l, p_r):
                if r_i == 0:
                    datas_expand[i] *= gain
                else:
                    datas_expand[i] *= bate
        return datas_expand
    elif type == 9:  # ++-- All peaks up with a fixed gain
        peaks, _ = wfp.find_peaks(datas_expand)
        peaks = peaks.tolist()
        n_peak = []
        last_peak_value = 0
        for k in range(len(peaks) - 2):
            if abs(peaks[k] - last_peak_value) > 10:
                n_peak.append(peaks[k])
                last_peak_value = peaks[k]
        for p in n_peak:
            p_l = p - 5
            if p_l < 0:
                p_l = 0
            p_r = p + 5
            if p_r > 179:
                p_r = 179
            for i in range(p_l, p_r):
                datas_expand[i] *= 1.15
        return datas_expand
    else:
        return datas
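

# Hedged usage sketch (added for illustration, not part of the original code):
# feeds a synthetic 180-sample beat with its R peak at index 75 through two
# augmentation modes and checks that the window length is preserved. It assumes
# this module already imports `math` and `numpy as np` (mode 2 uses math.floor);
# the helper name `_demo_expand_ecg` is hypothetical.
def _demo_expand_ecg():
    import numpy as np
    beat = np.zeros(180)
    beat[75] = 1.0  # synthetic R peak at the centre index the modes assume
    beat = beat.tolist()

    amplified = expand_ecg(beat, type=1, gain=1.1)  # mode 1: scale the whole beat
    stretched = expand_ecg(beat, type=2)            # mode 2: widen the R-peak region, then re-crop
    assert len(amplified) == 180
    assert len(stretched) == 180                    # mode 2 crops back to a 180-sample window
    return amplified, stretched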