Example #1
def ex1_2_3_mean_square_error():
    '''"# Empirically, hamilton segmenter performed better for RRI
    ecg_signal_resampled = resample(ecg_signal,
                                    len(ppg_signal))  # we need to resample the eeg signal to rate of the ppg signal
    assert len(ecg_signal_resampled) == len(ppg_signal)
    ecg_peak_indices = ecg_processing.hamilton_segmenter(ecg_signal_resampled, ppg_sf)[
        'rpeaks']  # now it has the same sf as the ppg signal
    ecg_peak_times = ecg_peak_indices / ppg_sf
    rri = np.diff(ecg_peak_times)
    '''

    # Empirically, Elgendi performed better for RRI
    ecg_signal_resampled = resample(ecg_signal, len(ppg_signal))  # we need to resample the ECG signal to the rate of the PPG signal
    assert len(ecg_signal_resampled) == len(ppg_signal)
    ecg_sf = ppg_sf
    ecg_peak_times = ex1_2_4_elgendi()
    rri = np.diff(ecg_peak_times)

    # Empirically, find_peaks performed better for PPI
    filtered_ppg_signal = ppg_processing.bvp(ppg_signal, ppg_sf, show=False)['filtered']
    ppg_peak_indices, _ = find_peaks(filtered_ppg_signal, prominence=35, distance=0.5 * ppg_sf)
    ppg_peak_times = ppg_peak_indices / ppg_sf
    ppi = np.diff(ppg_peak_times)

    # trim the longer series if the algorithms did not find the same number of peaks
    if len(rri) > len(ppi):
        rri = rri[:len(ppi)]
    if len(ppi) > len(rri):
        ppi = ppi[:len(rri)]

    mse = np.square(np.subtract(rri, ppi)).mean()
    print("MSE:", mse)
Example #2
    def _process_signal(self, signal, freq):
        # Filter the raw BVP signal with biosppy
        biosppy_processed = bvp(signal, freq, show=self.plot)
        y_filtered = biosppy_processed['filtered']
        # Make sure the filtered signal is a NumPy array before further processing
        if isinstance(y_filtered, list):
            y = np.asarray(y_filtered)
        else:
            y = y_filtered

        # Compute heart-rate measures; calc_freq=True also returns frequency-domain measures
        measures = hb.process(y, freq, calc_freq=True)
        return measures
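A minimal, self-contained sketch of how a helper like this might be wired up; the class name, the plot attribute handling, and the assumption that hb is the HeartPy package are illustrative guesses, not taken from the original:

import numpy as np
import heartpy as hb                      # assumed: hb refers to the HeartPy package
from biosppy.signals.bvp import bvp       # assumed: bvp is biosppy's BVP processing function

class BVPProcessor:                        # hypothetical wrapper class, for illustration only
    def __init__(self, plot=False):
        self.plot = plot

    def _process_signal(self, signal, freq):
        filtered = bvp(signal, freq, show=self.plot)['filtered']
        return hb.process(np.asarray(filtered), freq, calc_freq=True)

# usage (with a real BVP recording and its sampling rate):
# measures = BVPProcessor()._process_signal(my_bvp_signal, my_sampling_rate)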
Example #3
def ex1_2_2_scipy():
    # Find peaks
    print(ppg_sf)
    filtered_ppg_signal = ppg_processing.bvp(ppg_signal, ppg_sf, show=False)['filtered']
    ppg_peak_indices, _ = find_peaks(filtered_ppg_signal, prominence=35, distance=0.5 * ppg_sf)
    ppg_peak_times = ppg_peak_indices / ppg_sf

    plot_peaks(filtered_ppg_signal, ppg_times, ppg_peak_times, ppg_peak_indices, ppg_envelopes, ppg_label, 'findpeaks',
               'results_findpeaks', (-300, 300))
    plot_peak_intervals(ppg_peak_times, ppg_label, 'findpeaks', 'PPI', 'results_findpeaks')
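A small standalone sketch (not from the original) of the same SciPy find_peaks idea on a synthetic signal; the sampling rate, oscillation frequency, and prominence value here are made up:

import numpy as np
from scipy.signal import find_peaks

sf = 100                                          # assumed sampling rate in Hz
t = np.arange(0, 10, 1 / sf)                      # 10 s of samples
signal = np.sin(2 * np.pi * 1.2 * t)              # synthetic 1.2 Hz oscillation (~72 "beats" per minute)

# distance is given in samples, so 0.5 * sf enforces at least 0.5 s between detected peaks
peak_indices, _ = find_peaks(signal, prominence=0.5, distance=0.5 * sf)
peak_times = peak_indices / sf                    # convert sample indices to seconds
print(peak_times)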
Example #4
def load_feel_data(sub_id):  # TODO: use machine learning to improve this
    feel_loc = "../Data/FeelData/EEGStudy1/Feel_Data/"
    feel_data = loadmat(feel_loc + "Subject_" + str(sub_id) + ".mat")
    ts = feel_data['numStart'][0]
    bvp_data = [i[0] for i in feel_data['denB']]
    temp_data = [i[0] for i in feel_data['denT']]
    gsr_data = [i[0] for i in feel_data['denG']]
    secs = int(ts[5])
    microsecs = int(1e6 * (ts[5] - secs))  # fractional seconds; datetime expects microseconds
    start_time = datetime.datetime(int(ts[0]), int(ts[1]), int(ts[2]),
                                   int(ts[3]), int(ts[4]), secs, microsecs)
    time_data = [
        datetime.time(int(t_raw[0]), int(t_raw[1]), int(t_raw[2]))
        for t_raw in feel_data['ACQ']
    ]
    bvp_sig = bvp.bvp(signal=bvp_data, sampling_rate=20, show=True)
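A self-contained sketch (not from the original) of the loadmat pattern used above; the file name and field name are invented for illustration, and .ravel() is an equivalent alternative to the list comprehensions:

import numpy as np
from scipy.io import savemat, loadmat

# build a tiny stand-in .mat file with a column-vector field, as MATLAB typically stores it
savemat("toy_subject.mat", {"denB": np.arange(5.0).reshape(-1, 1)})

data = loadmat("toy_subject.mat")          # returns a dict of NumPy arrays
bvp_data = [i[0] for i in data["denB"]]    # the list-comprehension style used in the example
bvp_flat = data["denB"].ravel()            # equivalent flattening with NumPy
print(bvp_data, bvp_flat)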
Example #5
def ex1_2_2_biosspy():
    # Find onsets
    # It uses the Zong et al. approach, which skips corrupted signal parts
    result = ppg_processing.bvp(ppg_signal, ppg_sf, show=False)
    filtered_ppg_signal = result['filtered']
    ppg_onset_indices = result['onsets']
    heart_rate_ts = result['heart_rate_ts']
    heart_rate = result['heart_rate']
    ppg_onset_times = ppg_onset_indices / ppg_sf

    plot_onsets(filtered_ppg_signal, ppg_times, ppg_onset_times, ppg_envelopes, ppg_label, 'bvp',
                'results_bvp', (-300, 300))
    plot_peak_intervals(ppg_onset_times, ppg_label, 'bvp', 'Onset intervals', 'results_bvp')

    # Plot heart rate, just for fun
    fig = plt.figure(figsize=(16, 4))
    plt.plot(heart_rate_ts, heart_rate)
    mean = np.mean(heart_rate)
    plt.plot((0, heart_rate_ts[-1]), [mean, mean], 'r-', label='Mean')
    plt.xlabel('Time [s]')
    plt.ylabel('Instantaneous Heart Rate [bpm]')
    plt.grid()
    plt.title("Heart rate derived from PPG")
    fig.tight_layout()
    plt.legend(loc='upper right')
    fig.savefig("results_bvp/heart_rate")
Example #6
	def parsePhysiological(self, log, participantid):
		print "Participant: ", participantid
		rawSCArr = []
		rawHRArr = []
		hrUnfiltered = []
		scUnfiltered = []
		scFiltered = []
		hrFiltered = []
		timestamps = []
		heartrates = {}
		skinconductances = {}

		for ts in self.timestamps:
			heartrates[ts] = {
				'measurements': [],
				'value': 0
			}
			skinconductances[ts] = {
				'measurements': [],
				'value': 0
			}

		hrRange = (1000/samplingRate)*hrInterval #range for BPM calculation in Number of measurements (array positions)
		for k,v in enumerate(log):
			if k>2:
				ts = int(float(v[0]))
				timestamps.append(ts)
				hrUnfiltered.append(float(v[13]))
				scUnfiltered.append(float(v[8]))

		numberOfMeasurements = len(hrUnfiltered)
		# print numberOfMeasurements, "--", participantid

		scFiltered = scUnfiltered  # skin conductance is used as-is (no filtering applied)

		for k,v in enumerate(hrUnfiltered):
			# print k , " - ", numberOfMeasurements, " - ", participantid
			measurement = None
			sample = []
			if k>hrRange/2 and k<numberOfMeasurements-hrRange/2:
				sample = hrUnfiltered[k-hrRange/2:k+hrRange/2]
			elif k<hrRange/2:
				sample = hrUnfiltered[0:hrRange]
			else:
				sample = hrUnfiltered[numberOfMeasurements-hrRange:numberOfMeasurements-1]


			try:
				bvpArr = bvp.bvp(sample,20,show=False)
				measurement = np.nanmean(bvpArr["heart_rate"])
				hrFiltered.append(measurement)
			except Exception:
				# could not compute bpm; fall back to the previous filtered value (NaN if none yet)
				measurement = hrFiltered[-1] if hrFiltered else np.nan
				hrFiltered.append(measurement)
			# print measurement

		for k,v in enumerate(hrFiltered):
			modulo = timestamps[k]%500
			tsRounded = timestamps[k]-modulo
			if tsRounded in self.timestamps:
				heartrates[tsRounded]['measurements'].append(v)
				skinconductances[tsRounded]['measurements'].append(scFiltered[k])
		for m in heartrates:
			heartrates[m]['value'] = np.nanmean(heartrates[m]['measurements'])
			skinconductances[m]['value'] = np.nanmean(skinconductances[m]['measurements'])

		# print heartrates

		
		# self.rawSkinconductances[participantid] = rawSCArr
		# self.rawHeartrates[participantid] = rawHRArr

		self.heartrates[participantid] = heartrates
		self.skinconductances[participantid] = skinconductances

		print "YOLO"
		print len(self.heartrates[participantid])
		print len(self.timestamps)
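A short standalone sketch (not from the original, and written in Python 3 for brevity) of the centered-window selection used in the loop above; the window size and data are made up:

def centered_window(values, k, window):
    # take `window` samples centred on index k, clamped to the ends of the list
    half = window // 2
    n = len(values)
    if half < k < n - half:
        return values[k - half:k + half]
    if k <= half:
        return values[:window]
    return values[n - window:]

data = list(range(20))                 # toy measurement series
print(centered_window(data, 10, 6))    # mid-series: [7, 8, 9, 10, 11, 12]
print(centered_window(data, 1, 6))     # near the start: first 6 samples
print(centered_window(data, 19, 6))    # near the end: last 6 samples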
Example #7
            # timestamp = timestamp0 + timestamp1*256 + timestamp2*65536

            timestamp = int(round(time.time() * 1000))
            # print timestamp

            # allHR.append(PPG_raw)

            sc = {"data": [{"timestamp": timestamp, "value": GSR_ms}]}

            # calculate BPM for the last X seconds: at 20 Hz, that's X*20 measurements
            numMeasurements = 1200  # 60 s window -> each timestamp computes BPM over the last 60 s
            allHR.append(PPG_raw)
            if len(allHR) >= numMeasurements:
                # print allHR[len(allHR)-200:len(allHR)-1]
                x = bvp.bvp(allHR[-numMeasurements:], 20, show=False)
                # print "heart rate:"
                # print x['heart_rate']
                # allHR = []
                bpm = np.mean(x['heart_rate'])
                # print bpm
                hr = {"data": [{"timestamp": timestamp, "value": bpm}]}
                hrReq = requests.post('http://localhost:3000/postHR', json=hr)

            # print json.dumps(obj)
            # print hr

            scReq = requests.post('http://localhost:3000/postSC', json=sc)
            # print hrReq.json()
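A standalone sketch (not from the original) of the rolling-buffer idea behind the snippet above, using a deque so only the most recent window is kept; the window length and the incoming samples are made up:

from collections import deque

window_samples = 1200                        # 60 s at 20 Hz (assumed, as in the example)
buffer = deque(maxlen=window_samples)        # automatically discards samples older than the window

for sample in range(5000):                   # stand-in for the stream of raw PPG readings
    buffer.append(sample)
    if len(buffer) == window_samples:
        window = list(buffer)                # the last 60 s of samples, ready for e.g. bvp.bvp(window, 20)
print(len(buffer))                           # never exceeds 1200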
Example #8
import argparse

import numpy as np
from biosppy.signals import bvp, eda

parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("csv_file", type=str, help="File containing the raw data")
parser.add_argument('-s',
                    '--sampling-rate',
                    type=int,
                    default=1000,
                    help="The sample rate")
parser.add_argument('-m',
                    '--min-amplitude',
                    type=float,
                    default=0.1,
                    help="Min amplitude (for EDA)")
args = parser.parse_args()

signals = np.loadtxt(args.csv_file, delimiter=',')
t = signals[:, 0]
bvp_raw = signals[:, 1]
eda_raw = signals[:, 2]

bvp_signal = bvp.bvp(bvp_raw, sampling_rate=args.sampling_rate)
eda_signal = eda.eda(eda_raw,
                     sampling_rate=args.sampling_rate,
                     min_amplitude=args.min_amplitude)

print(bvp_signal)
print(eda_signal)
#
# with open(csv_file_name, 'w') as csv_file:
#     csv_reader = csv.reader(csv_file, delimiter=',')
#     for row in csv_reader:
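A minimal standalone sketch (not from the original) of how a CSV compatible with the script above could be generated; the file name, duration, and waveforms are invented for illustration:

import numpy as np

sampling_rate = 1000                                   # matches the script's default --sampling-rate
t = np.arange(0, 10, 1 / sampling_rate)                # 10 s of timestamps
bvp_raw = np.sin(2 * np.pi * 1.2 * t)                  # toy BVP-like oscillation
eda_raw = 0.5 + 0.01 * t                               # toy slowly drifting EDA trace

# three columns (time, BVP, EDA), as the script expects when it calls np.loadtxt
np.savetxt("toy_signals.csv", np.column_stack([t, bvp_raw, eda_raw]), delimiter=",")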