def extract_hrv(rri):
    """Return HRV time- and frequency-domain feature values for *rri*.

    Parameters
    ----------
    rri : array-like
        Sequence of R-R intervals.

    Returns
    -------
    list
        Feature values ordered by sorted feature name, so the vector
        layout is deterministic across calls.
    """
    hrv_data = hrv.time_domain(rri)
    hrv_data.update(hrv.frequency_domain(rri))
    # Sort the keys so the resulting feature vector has a stable order.
    return [hrv_data[key] for key in sorted(hrv_data)]
def r_features(r_peaks):
    """Compute HRV time-domain features from R-peak sample positions.

    NOTE(review): this module defines ``r_features`` more than once; later
    definitions shadow this one, so only the last is reachable at import
    time — confirm which version callers actually want.

    Parameters
    ----------
    r_peaks : array-like of int
        Sample indices of detected R peaks.

    Returns
    -------
    dict
        hrv time-domain features of the R-R intervals, counts of
        abnormally short intervals, and the same time-domain features
        recomputed on the first and second differences of the intervals
        (suffixes "fil1"/"fil2").
    """
    times = np.diff(r_peaks)
    avg = np.mean(times)
    # Count intervals shorter than half the mean — likely spurious beats.
    filtered = sum(1 for t in times if t < 0.5 * avg)
    # Guard the denominator against empty input (division by zero); this
    # mirrors the guard already present in the sibling r_features(s, r_peaks).
    total = len(r_peaks) if len(r_peaks) > 0 else 1

    data = hrv.time_domain(times)
    data['filtered_r'] = filtered
    data['rel_filtered_r'] = filtered / total

    # RRI Velocity
    diff_rri = np.diff(times)
    data.update(add_suffix(hrv.time_domain(diff_rri), "fil1"))

    # RRI Acceleration
    diff2_rri = np.diff(diff_rri)
    data.update(add_suffix(hrv.time_domain(diff2_rri), "fil2"))
    return data
def r_features(s, r_peaks):
    """Compute R-peak amplitude statistics plus HRV time-domain features.

    Parameters
    ----------
    s : sequence
        ECG signal samples.
    r_peaks : sequence of int
        Indices of detected R peaks within *s*.

    Returns
    -------
    dict
        hrv time-domain features of the R-R intervals, beat density
        relative to signal length, amplitude mean/std, and counts of
        abnormally short intervals.
    """
    amplitudes = [s[idx] for idx in r_peaks]
    intervals = np.diff(r_peaks)
    mean_interval = np.mean(intervals)
    # Intervals shorter than half the mean are treated as filtered beats.
    n_filtered = sum(1 for iv in intervals if iv < 0.5 * mean_interval)
    denom = len(amplitudes) or 1  # avoid division by zero on empty input

    data = hrv.time_domain(intervals)
    data['beats_to_length'] = len(r_peaks) / len(s)
    data['r_mean'] = np.mean(amplitudes)
    data['r_std'] = np.std(amplitudes)
    data['filtered_r'] = n_filtered
    data['rel_filtered_r'] = n_filtered / denom
    return data
def r_features(r_peaks, sampling_rate=500):
    """Compute an extensive HRV feature dictionary from R-peak sample indices.

    Parameters
    ----------
    r_peaks : array-like of int
        Sample indices of detected R peaks.
    sampling_rate : int, optional
        Sampling frequency in Hz used to convert sample differences to
        seconds. Defaults to 500, the value previously hard-coded.

    Returns
    -------
    dict
        Artifact ratios, time-domain statistics, entropy/complexity
        measures, and hrv time-/frequency-domain features of the
        intervals and of their first and second differences
        (suffixes "fil1"/"fil2").
    """
    # Sanity check after artifact removal: fall back to a dummy, perfectly
    # regular beat train when too few peaks remain to compute HRV.
    if len(r_peaks) < 5:
        print(
            "NeuroKit Warning: ecg_hrv(): Not enough normal R peaks to compute HRV :/"
        )
        r_peaks = [1000, 2000, 3000, 4000]

    hrv_dict = dict()

    # R-R intervals in seconds (sample-index differences / sampling rate).
    RRis = np.diff(r_peaks)
    RRis = RRis / sampling_rate
    RRis = RRis.astype(float)

    # Artifact detection - Statistical: count (do not remove) intervals
    # that fall outside fixed physiological bounds or deviate more than
    # 25% from the median interval.
    rr1 = 0  # implausibly short (< 0.6 s)
    rr2 = 0  # implausibly long (> 1.3 s)
    rr3 = 0  # > 25% below the median
    rr4 = 0  # > 25% above the median
    median_rr = np.median(RRis)
    for rr in RRis:
        if rr < 0.6:
            rr1 += 1
        if rr > 1.3:
            rr2 += 1
        if rr < median_rr * 0.75:
            rr3 += 1
        if rr > median_rr * 1.25:
            rr4 += 1

    # Artifacts treatment: store each count as a fraction of all intervals.
    hrv_dict["n_Artifacts1"] = rr1 / len(RRis)
    hrv_dict["n_Artifacts2"] = rr2 / len(RRis)
    hrv_dict["n_Artifacts3"] = rr3 / len(RRis)
    hrv_dict["n_Artifacts4"] = rr4 / len(RRis)

    # Classic time-domain statistics.
    hrv_dict["RMSSD"] = np.sqrt(np.mean(np.diff(RRis) ** 2))
    hrv_dict["meanNN"] = np.mean(RRis)
    hrv_dict["sdNN"] = np.std(RRis, ddof=1)  # sample std (divide by N-1)
    hrv_dict["cvNN"] = hrv_dict["sdNN"] / hrv_dict["meanNN"]
    hrv_dict["CVSD"] = hrv_dict["RMSSD"] / hrv_dict["meanNN"]
    hrv_dict["medianNN"] = np.median(abs(RRis))
    hrv_dict["madNN"] = mad(RRis, constant=1)
    hrv_dict["mcvNN"] = hrv_dict["madNN"] / hrv_dict["medianNN"]

    # BUG FIX: RRis is in seconds at this point, so the NN50/NN20
    # thresholds must be 0.05 s / 0.02 s. The previous literals 50 and 20
    # (milliseconds) could never be exceeded by second-valued differences,
    # which made pNN50 and pNN20 identically zero.
    successive = np.abs(np.diff(RRis))
    nn50 = sum(successive > 0.05)
    nn20 = sum(successive > 0.02)
    hrv_dict["pNN50"] = nn50 / len(RRis) * 100
    hrv_dict["pNN20"] = nn20 / len(RRis) * 100

    # Entropy / complexity measures.
    hrv_dict["Shannon"] = complexity_entropy_shannon(RRis)
    hrv_dict["Sample_Entropy"] = nolds.sampen(RRis, emb_dim=2)
    #mse = complexity_entropy_multiscale(RRis, max_scale_factor=20, m=2)
    #hrv_dict["Entropy_Multiscale_AUC"] = mse["MSE_AUC"]
    hrv_dict["Entropy_SVD"] = complexity_entropy_svd(RRis, emb_dim=2)
    hrv_dict["Entropy_Spectral_VLF"] = complexity_entropy_spectral(
        RRis, sampling_rate, bands=np.arange(0.0033, 0.04, 0.001))
    hrv_dict["Entropy_Spectral_LF"] = complexity_entropy_spectral(
        RRis, sampling_rate, bands=np.arange(0.04, 0.15, 0.001))
    hrv_dict["Entropy_Spectral_HF"] = complexity_entropy_spectral(
        RRis, sampling_rate, bands=np.arange(0.15, 0.40, 0.001))
    hrv_dict["Fisher_Info"] = complexity_fisher_info(RRis, tau=1, emb_dim=2)
    #hrv_dict["FD_Petrosian"] = complexity_fd_petrosian(RRis)
    #hrv_dict["FD_Higushi"] = complexity_fd_higushi(RRis, k_max=16)

    hrv_dict.update(hrv.time_domain(RRis))
    hrv_dict.update(hrv.frequency_domain(RRis))

    # RRI Velocity
    diff_rri = np.diff(RRis)
    hrv_dict.update(add_suffix(hrv.time_domain(diff_rri), "fil1"))
    hrv_dict.update(add_suffix(hrv.frequency_domain(diff_rri), "fil1"))

    # RRI Acceleration
    diff2_rri = np.diff(diff_rri)
    hrv_dict.update(add_suffix(hrv.time_domain(diff2_rri), "fil2"))
    hrv_dict.update(add_suffix(hrv.frequency_domain(diff2_rri), "fil2"))
    return hrv_dict