Example #1
0
def test_ppg_simulate(duration, sampling_rate, heart_rate, freq_modulation):
    """Simulate a noise-free PPG and verify length, mean rate and rate spread."""
    ppg = nk.ppg_simulate(
        duration=duration,
        sampling_rate=sampling_rate,
        heart_rate=heart_rate,
        frequency_modulation=freq_modulation,
        ibi_randomness=0,
        drift=0,
        motion_amplitude=0,
        powerline_amplitude=0,
        burst_amplitude=0,
        burst_number=0,
        random_state=42,
        show=False,
    )

    # The simulator must return exactly duration * rate samples.
    assert ppg.size == duration * sampling_rate

    signals, _ = nk.ppg_process(ppg, sampling_rate=sampling_rate)
    rate = signals["PPG_Rate"]

    # The average instantaneous rate should sit on the requested heart rate.
    assert np.allclose(rate.mean(), heart_rate, atol=1)

    # The 10th-90th percentile spread of the rate should match the range
    # implied by the frequency modulation, within a 15% tolerance.
    expected_spread = freq_modulation * heart_rate
    observed_spread = np.percentile(rate, 90) - np.percentile(rate, 10)
    assert np.allclose(expected_spread, observed_spread, atol=expected_spread * 0.15)
Example #2
0
def _physio_process(data_path):
    """Clean one physio recording and extract respiratory and cardiac peaks.

    Reads the JSON sidecar for the sampling rate and column names, runs
    NeuroKit's RSP and PPG pipelines, and returns the combined signals
    (indexed by time in seconds) plus the merged info dict.
    """
    sidecar = str(data_path).replace("tsv.gz", "json")
    meta = read_json(sidecar)
    fs = meta["SamplingFrequency"]

    raw = read_tsv(
        data_path,
        compression="gzip",
        names=meta["Columns"],
        index_col=False,
    )

    resp_signals, resp_info = nk.rsp_process(raw["respiratory"], fs)
    card_signals, card_info = nk.ppg_process(raw["cardiac"], fs)

    combined = pd.concat([resp_signals, card_signals], axis=1)
    # Convert the sample index to seconds and use it as the index.
    combined["time"] = combined.index * 1 / fs
    combined = combined.set_index("time")

    info = {**resp_info, **card_info, "SamplingFrequency": fs}
    return combined, info
Example #3
0
def neurokit_index(request):
    """Django view: compute HRV indices from a recorded PPG trace.

    Loads the CSV recording, cleans the raw PPG signal, detects systolic
    peaks, computes HRV indices, and renders them as JSON into the template.
    """
    # TODO(review): hard-coded local path — move to settings or derive it
    # from the request before deploying.
    data = pd.read_csv(
        "/Users/siyuqian/Study/django-docker/712AF22B_Mar11_14-07-59.csv")

    # The recording is sampled at 50 Hz; use the same rate in every step.
    # (The original mixed 50 and 100 Hz across calls, which skews HRV.)
    sampling_rate = 50

    # Clean the raw PPG column directly.  (Bug fix: previously the
    # (signals, info) tuple returned by nk.ppg_process() was passed to
    # nk.ppg_clean(), which expects the raw signal.)
    ppg_clean = nk.ppg_clean(data['PPG'], sampling_rate=sampling_rate)

    # Detect systolic peaks in the cleaned signal.
    peaks = nk.ppg_findpeaks(ppg_clean, sampling_rate=sampling_rate)

    # Compute HRV indices.  show=False: never open a matplotlib window
    # inside a web request handler.
    hrv_indices = nk.hrv(peaks, sampling_rate=sampling_rate, show=False)

    result = hrv_indices.to_json()
    parsed = json.loads(result)
    context = {'response': json.dumps(parsed)}
    return render(request, 'neurokit/neurokit_index.html', context)
Example #4
0
def process_bvp(bvp, show_fig=False):
    """
        Compute BVP signal features (more info: https://neurokit2.readthedocs.io/en/latest/functions.html#module-neurokit2.ppg).
        Compute HRV indices (more info: https://neurokit2.readthedocs.io/en/latest/functions.html#module-neurokit2.hrv)

        Parameters
        ----------
        bvp : dict [timestamp : value]
            BVP (blood volume pulse) signal.
        show_fig : bool, optional
            Whether NeuroKit should display the HRV plots (default False).

        Returns
        -------
        bvp_signals : DataFrame
        bvp_info : dict
        hrv_indices : DataFrame
            HRV indices for the baseline (first 5 s, type='base') and the
            remainder of the recording (type='stimul'), concatenated.
    """
    # Assumes the BVP stream is sampled at 64 Hz — TODO confirm with the
    # recording device's spec.
    sampling_rate = 64
    baseline_samples = 5 * sampling_rate  # first 5 seconds = 320 samples

    bvp_signals, bvp_info = nk.ppg_process(bvp['value'], sampling_rate=sampling_rate)

    # --- Baseline: first 5 seconds of the signal --------------------------
    peaks = bvp_signals['PPG_Peaks'][:baseline_samples]
    # .copy() so that adding the 'type' column below cannot alias/mutate
    # the frame returned by nk.hrv_time().
    hrv_base = nk.hrv_time(peaks, sampling_rate=sampling_rate, show=show_fig).copy()
    hrv_base['type'] = 'base'

    # --- Stimulus: the rest of the signal ---------------------------------
    peaks = bvp_signals['PPG_Peaks'][baseline_samples:]
    phase_hrv = nk.hrv_frequency(peaks, sampling_rate=sampling_rate, show=show_fig)
    time_hrv = nk.hrv_time(peaks, sampling_rate=sampling_rate, show=show_fig)
    nonlinear_hrv = nk.hrv_nonlinear(peaks, sampling_rate=sampling_rate, show=show_fig)

    hrv_indices = pd.concat([phase_hrv, time_hrv, nonlinear_hrv], axis=1)
    # NOTE: label kept as 'stimul' — downstream code may match this string.
    hrv_indices['type'] = 'stimul'

    hrv_indices = pd.concat([hrv_indices, hrv_base])

    return bvp_signals, bvp_info, hrv_indices
Example #5
0
def test_ppg_simulate_ibi(ibi_randomness, std_heart_rate):
    """Check that IBI randomness drives the heart-rate standard deviation."""
    duration, fs, hr = 20, 50, 70

    simulated = nk.ppg_simulate(
        duration=duration,
        sampling_rate=fs,
        heart_rate=hr,
        frequency_modulation=0,
        ibi_randomness=ibi_randomness,
        drift=0,
        motion_amplitude=0,
        powerline_amplitude=0,
        burst_amplitude=0,
        burst_number=0,
        random_state=42,
        show=False,
    )

    # Exact number of samples: 20 s at 50 Hz.
    assert simulated.size == duration * fs

    processed, _ = nk.ppg_process(simulated, sampling_rate=fs)
    rate = processed["PPG_Rate"]

    # Mean rate stays close to the requested 70 bpm.
    assert np.allclose(rate.mean(), hr, atol=1.5)

    # The rate's standard deviation should track the expected value.
    assert np.allclose(rate.std(), std_heart_rate, atol=1)
Example #6
0
# Render the EMG processing summary (uses `signals` produced earlier in
# this script) and save it for the README.
# NOTE(review): `h_pad` is not a documented savefig() keyword — verify it
# has the intended effect (it is silently forwarded as a print kwarg).
plot = nk.emg_plot(signals, sampling_rate=250)
plot.set_size_inches(10, 6, forward=True)
plot.savefig("README_emg.png", dpi=300, h_pad=3)

# =============================================================================
# Photoplethysmography (PPG/BVP)
# =============================================================================

# Generate 15 seconds of PPG signal (recorded at 250 samples / second)
ppg = nk.ppg_simulate(duration=15,
                      sampling_rate=250,
                      heart_rate=70,
                      random_state=333)

# Process it
signals, info = nk.ppg_process(ppg, sampling_rate=250)

# Visualize the processing
nk.ppg_plot(signals, sampling_rate=250)

# Save it
# (ppg_plot is called a second time to get a fresh figure object to size
# and write to disk.)
plot = nk.ppg_plot(signals, sampling_rate=250)
plot.set_size_inches(10, 6, forward=True)
plot.savefig("README_ppg.png", dpi=300, h_pad=3)

# =============================================================================
# Electrooculography (EOG)
# =============================================================================

# Import EOG data (100 Hz example recording bundled with NeuroKit;
# `eog_signal` is consumed by the section that follows this chunk)
eog_signal = nk.data("eog_100hz")
Example #7
0
def extract_bvp_features(bvp_data, sampling_rate):
    """Extract heart-rate, RR-interval and HRV features from a PPG/BVP signal.

    The feature set follows the WESAD paper (Schmidt et al., ICMI 2018) and
    Nkurikiyeyezu et al. (2019, arXiv:1910.01770).  Returns a 1-D float
    array of 27 features.
    """
    # bvp_data = MinMaxScaler().fit_transform(np.array(bvp_data).reshape(-1, 1)).ravel()
    ppg_signals, info = nk.ppg_process(bvp_data, sampling_rate=sampling_rate)
    heart_rate = ppg_signals['PPG_Rate']
    # hr = MinMaxScaler().fit_transform(np.array(hr).reshape(-1, 1)).ravel()
    peaks = info['PPG_Peaks']

    # Sanitize input; a tuple result means the actual sampling rate was
    # detected and must replace the caller-supplied one.
    peaks = _hrv_sanitize_input(peaks)
    if isinstance(peaks, tuple):
        peaks, sampling_rate = peaks[0], peaks[1]

    rri = _hrv_get_rri(peaks, sampling_rate=sampling_rate, interpolate=False)
    diff_rri = np.diff(rri)

    # Ignore NeuroKitWarning about short recordings: hrv_frequency below is
    # re-run with explicit frequency bands.
    hrv_features = nk.hrv(peaks, sampling_rate=sampling_rate)
    # ULF/LF/HF bands follow the original WESAD dataset paper:
    # Schmidt et al., ICMI '18, DOI: https://doi.org/10.1145/3242969.3242985
    hrv_frequency = nk.hrv_frequency(
        peaks,
        sampling_rate=sampling_rate,
        ulf=(0.01, 0.04),
        lf=(0.04, 0.15),
        hf=(0.15, 0.4),
    )

    # --- WESAD features ---------------------------------------------------
    # Not including: f_x_HRV of ULF and HLF, rel_f_x, sum f_x_HRV.
    mean_HR = np.mean(heart_rate)
    std_HR = np.std(heart_rate)
    mean_HRV = hrv_features['HRV_MeanNN']
    std_HRV = hrv_features['HRV_SDNN']
    HRV_ULF = hrv_frequency['HRV_ULF']  # computed but excluded from output
    HRV_LF = hrv_frequency['HRV_LF']
    HRV_HF = hrv_frequency['HRV_HF']
    HRV_LFHF = hrv_frequency['HRV_LFHF']
    HRV_LFnorm = hrv_frequency['HRV_LFn']
    HRV_HFnorm = hrv_frequency['HRV_HFn']
    rms = np.sqrt(np.nanmean(rri ** 2))
    nn50 = np.sum(np.abs(diff_rri) > 50)  # assumes RRIs are in ms — TODO confirm
    HRV_TINN = hrv_features['HRV_TINN']
    HRV_pNN50 = hrv_features['HRV_pNN50']
    HRV_RMSSD = hrv_features['HRV_RMSSD']

    # --- Nkurikiyeyezu et al. (2019) features -----------------------------
    kurtosis_HRV = kurtosis(rri)
    skewness_HRV = skew(rri)
    HRV_VLF = hrv_frequency['HRV_VLF']  # computed but excluded from output
    HRV_SD1 = hrv_features['HRV_SD1']
    HRV_SD2 = hrv_features['HRV_SD2']
    HRV_SDSD = hrv_features['HRV_SDSD']
    HRV_SDSD_RMSSD = HRV_SDSD / HRV_RMSSD
    HRV_pNN25 = np.sum(np.abs(diff_rri) > 25) / len(rri) * 100

    # Relative RRI: successive-difference normalized by the local mean RRI.
    adj_sum_rri = diff_rri + 2 * rri[:-1]
    relative_RRI = 2 * diff_rri / adj_sum_rri
    mean_relativeRRI = np.mean(relative_RRI)
    median_relativeRRI = np.median(relative_RRI)
    std_relativeRRI = np.std(relative_RRI)
    RMSSD_relativeRRI = np.sqrt(np.mean(np.diff(relative_RRI) ** 2))
    kurtosis_relativeRRI = kurtosis(relative_RRI)
    skew_relativeRRI = skew(relative_RRI)

    # Combine in the fixed order downstream consumers expect.
    features = [
        mean_HR, std_HR, mean_HRV, std_HRV, kurtosis_HRV, skewness_HRV, rms,
        nn50, HRV_pNN50, HRV_pNN25, HRV_TINN, HRV_RMSSD, HRV_LF, HRV_HF,
        HRV_LFHF, HRV_LFnorm, HRV_HFnorm, HRV_SD1, HRV_SD2, HRV_SDSD,
        HRV_SDSD_RMSSD, mean_relativeRRI, median_relativeRRI, std_relativeRRI,
        RMSSD_relativeRRI, kurtosis_relativeRRI, skew_relativeRRI,
    ]
    return np.array([float(value) for value in features])