def main():
    """Demo: stream from the synthetic board and print the alpha/beta band-power ratio per EEG channel."""
    BoardShim.enable_dev_board_logger()

    # use synthetic board for demo
    params = BrainFlowInputParams()
    board_id = BoardIds.SYNTHETIC_BOARD.value
    sampling_rate = BoardShim.get_sampling_rate(board_id)
    board = BoardShim(board_id, params)
    board.prepare_session()
    board.start_stream()
    BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
    time.sleep(10)
    # PSD helper requires a power-of-two number of samples
    data = board.get_current_board_data(DataFilter.get_nearest_power_of_two(sampling_rate))
    board.stop_stream()
    board.release_session()

    eeg_channels = BoardShim.get_eeg_channels(board_id)
    for count, channel in enumerate(eeg_channels):
        # optional: subtract mean or detrend
        psd = DataFilter.get_psd(data[channel], sampling_rate,
                                 WindowFunctions.BLACKMAN_HARRIS.value)
        band_power_alpha = DataFilter.get_band_power(psd, 7.0, 13.0)
        band_power_beta = DataFilter.get_band_power(psd, 14.0, 30.0)
        # BUG FIX: the original passed the ratio as a second positional argument to
        # print(), printing the unformatted template and the number side by side;
        # use %-formatting so the %f placeholder is actually filled in.
        print("alpha/beta:%f" % (band_power_alpha / band_power_beta))
def main():
    """Demo: downsample synthetic-board EEG channels using different aggregation operations."""
    BoardShim.enable_dev_board_logger()

    # use synthetic board for demo
    params = BrainFlowInputParams()
    board = BoardShim(BoardIds.SYNTHETIC_BOARD.value, params)
    board.prepare_session()
    board.start_stream()
    BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
    time.sleep(10)
    # fetch the 20 newest samples without removing them from the internal buffer
    data = board.get_current_board_data(20)
    board.stop_stream()
    board.release_session()

    eeg_channels = BoardShim.get_eeg_channels(BoardIds.SYNTHETIC_BOARD.value)
    # demo for downsampling, it just aggregates data
    for idx, ch in enumerate(eeg_channels):
        print('Original data for channel %d:' % ch)
        print(data[ch])
        # pick a period/operation combination per channel to show the variants
        if idx == 0:
            period, operation = 3, AggOperations.MEDIAN.value
        elif idx == 1:
            period, operation = 2, AggOperations.MEAN.value
        else:
            period, operation = 2, AggOperations.EACH.value
        reduced = DataFilter.perform_downsampling(data[ch], period, operation)
        print('Downsampled data for channel %d:' % ch)
        print(reduced)
def main():
    """Demo: compute concentration and relaxation ML metrics from a single EEG recording.

    Board selection and connection parameters come from the command line; use the
    BrainFlow docs to check which parameters a specific board requires (e.g. for
    Cyton, set the serial port).
    """
    BoardShim.enable_board_logger()
    DataFilter.enable_data_logger()
    MLModel.enable_ml_logger()

    parser = argparse.ArgumentParser()
    parser.add_argument('--timeout', type=int, help='timeout for device discovery or connection',
                        required=False, default=0)
    parser.add_argument('--ip-port', type=int, help='ip port', required=False, default=0)
    parser.add_argument('--ip-protocol', type=int, help='ip protocol, check IpProtocolType enum',
                        required=False, default=0)
    parser.add_argument('--ip-address', type=str, help='ip address', required=False, default='')
    parser.add_argument('--serial-port', type=str, help='serial port', required=False, default='')
    parser.add_argument('--mac-address', type=str, help='mac address', required=False, default='')
    parser.add_argument('--other-info', type=str, help='other info', required=False, default='')
    parser.add_argument('--streamer-params', type=str, help='streamer params', required=False, default='')
    parser.add_argument('--serial-number', type=str, help='serial number', required=False, default='')
    parser.add_argument('--board-id', type=int, help='board id, check docs to get a list of supported boards',
                        required=True)
    parser.add_argument('--file', type=str, help='file', required=False, default='')
    args = parser.parse_args()

    params = BrainFlowInputParams()
    params.ip_port = args.ip_port
    params.serial_port = args.serial_port
    params.mac_address = args.mac_address
    params.other_info = args.other_info
    params.serial_number = args.serial_number
    params.ip_address = args.ip_address
    params.ip_protocol = args.ip_protocol
    params.timeout = args.timeout
    params.file = args.file

    board = BoardShim(args.board_id, params)
    master_board_id = board.get_board_id()
    sampling_rate = BoardShim.get_sampling_rate(master_board_id)
    board.prepare_session()
    board.start_stream(45000, args.streamer_params)
    BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
    # recommended window size for eeg metric calculation is at least 4 seconds, bigger is better
    time.sleep(5)
    data = board.get_board_data()
    board.stop_stream()
    board.release_session()

    eeg_channels = BoardShim.get_eeg_channels(int(master_board_id))
    bands = DataFilter.get_avg_band_powers(data, eeg_channels, sampling_rate, True)
    feature_vector = np.concatenate((bands[0], bands[1]))
    print(feature_vector)

    # calc concentration
    concentration_params = BrainFlowModelParams(BrainFlowMetrics.CONCENTRATION.value,
                                                BrainFlowClassifiers.KNN.value)
    concentration = MLModel(concentration_params)
    concentration.prepare()
    print('Concentration: %f' % concentration.predict(feature_vector))
    concentration.release()

    # calc relaxation
    relaxation_params = BrainFlowModelParams(BrainFlowMetrics.RELAXATION.value,
                                             BrainFlowClassifiers.REGRESSION.value)
    relaxation = MLModel(relaxation_params)
    relaxation.prepare()
    print('Relaxation: %f' % relaxation.predict(feature_vector))
    relaxation.release()
# Poll the board for newly available EEG samples; if any arrived: optionally append
# them to the session CSV, slide them into the fixed-size ring buffer
# (self.data_buffer, rolled left by the new sample count), and bump the sample
# counters. Classification is triggered once enough samples accumulate, and an
# online-training pass schedules the next mental task via a QTimer single-shot.
# NOTE(review): this definition was flattened by whitespace loss, so the original
# indentation — i.e. which of the trailing `if` blocks nest inside the outer
# `get_board_data_count() > 0` check — cannot be recovered here; the statement is
# left byte-identical. Assumes self.data_buffer is a 2-D (channels x samples)
# numpy array — TODO confirm against the class initializer.
def read_data(self): if self.board.get_board_data_count() > 0: raw_data = self.board.get_board_data() raw_eeg_data = utils.extract_eeg_data(raw_data, global_config.BOARD_ID) if self.root_directory_label.text() != "": full_path = self.root_directory_label.text() + "/" + global_config.EEG_DATA_FILE_NAME DataFilter.write_file(raw_eeg_data, full_path, "a") self.slice_generator.write_to_file(self.root_directory_label.text()) # Make room for new samples, discard the oldest self.data_buffer = np.roll(self.data_buffer, shift=-raw_eeg_data.shape[1], axis=1) # Insert new samples first_index = self.feature_extraction_info.first_electrode() - 1 last_index = self.feature_extraction_info.last_electrode() # Not including self.data_buffer[:, self.data_buffer.shape[1] - raw_eeg_data.shape[1]:] = raw_eeg_data[first_index:last_index, :] self.samples_push_count += raw_eeg_data.shape[1] self.sample_count += raw_eeg_data.shape[1] if self.online_training and self.online_training_timer is None: self.online_training_samples_push_count += raw_eeg_data.shape[1] if self.samples_push_count >= self.config.repetition_interval * self.feature_extraction_info.sampling_rate: self.classify_data() self.samples_push_count = 0 if self.online_training_samples_push_count >= self.config.feature_window_size * self.feature_extraction_info.sampling_rate: self.classify_data(online_training=True) self.online_training_samples_push_count = 0 self.online_training_timer = QTimer() self.online_training_timer.singleShot(self.MENTAL_TASK_DELAY, self.next_mental_task) self.clear_highlight_tile()
def denoise(sample, num_channels=16):
    """Denoise each channel of ``sample[0]`` in place, then z-scale by per-channel std.

    Applies a rolling median (period 3) followed by db6 wavelet denoising
    (level 3) per channel; BrainFlow filters mutate the arrays in place.
    Returns the sample divided by the standard deviation along the last axis.
    """
    for ch in range(num_channels):
        DataFilter.perform_rolling_filter(sample[0][ch], 3, AggOperations.MEDIAN.value)
        DataFilter.perform_wavelet_denoising(sample[0][ch], 'db6', 3)
    scale = np.expand_dims(sample.std(axis=-1), axis=-1)
    return sample / scale
def main():
    """Demo: serialize synthetic-board data to CSV via the BrainFlow API and read it back."""
    BoardShim.enable_dev_board_logger()

    # use synthetic board for demo
    params = BrainFlowInputParams()
    board = BoardShim(BoardIds.SYNTHETIC_BOARD.value, params)
    board.prepare_session()
    board.start_stream()
    BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
    time.sleep(10)
    # get 20 latest data points; does not remove them from the internal buffer
    data = board.get_current_board_data(20)
    board.stop_stream()
    board.release_session()

    # demo how to convert it to pandas DF and plot data
    eeg_channels = BoardShim.get_eeg_channels(BoardIds.SYNTHETIC_BOARD.value)
    df = pd.DataFrame(np.transpose(data))
    print('Data From the Board')
    print(df.head(10))

    # demo for data serialization using the brainflow API; recommended over pandas.to_csv()
    DataFilter.write_file(data, 'test.csv', 'w')  # use 'a' for append mode
    restored_data = DataFilter.read_file('test.csv')
    restored_df = pd.DataFrame(np.transpose(restored_data))
    print('Data From the File')
    print(restored_df.head(10))
def read_eeg_data(self):
    """Timer callback: refresh the countdown label and append any new EEG samples to the session file."""
    elapsed = time.time() - self.timer_start_time
    remaining = math.floor(self.configurations.trial_duration +
                           self.configurations.relaxation_period - elapsed)
    self.timer_label.setText("{} sec".format(remaining))

    if self.board.get_board_data_count() > 0:
        raw_data = self.board.get_board_data()
        raw_eeg_data = utils.extract_eeg_data(raw_data, global_config.BOARD_ID)
        # add the number of columns in the newly read data to the running count
        self.sample_count += raw_eeg_data.shape[1]
        if self.configurations.validate_saving_info():
            full_path = self.configurations.root_directory + "/" + global_config.EEG_DATA_FILE_NAME
            # Saved in append mode: if a file with the same name already exists in
            # the directory, new data is appended to it instead of overwriting it.
            DataFilter.write_file(raw_eeg_data, full_path, "a")
def rolling_filter(self, data, parameter_list):
    """Apply an in-place rolling filter to the first ``parameter_list[-1]`` channels.

    parameter_list layout (presumably — verify against callers): period at [1],
    aggregation operation at [2], channel count at [-1].
    Returns the filtered channels stacked into a numpy array.
    """
    channel_count = parameter_list[-1]
    filtered = []
    for ch in range(channel_count):
        DataFilter.perform_rolling_filter(data[ch], parameter_list[1], parameter_list[2])
        filtered.append(data[ch])
    return np.array(filtered)
def high_pass(self, data, parameter_list):
    """High-pass filter the first ``parameter_list[-1]`` channels in place and return them stacked.

    parameter_list layout (presumably — verify against callers): sampling rate
    at [1], cutoff at [2], order at [3], filter type at [4], channel count at
    [-1]; the ripple argument is fixed at 3.
    """
    filtered = []
    for ch in range(parameter_list[-1]):
        DataFilter.perform_highpass(data[ch], parameter_list[1], parameter_list[2],
                                    parameter_list[3], parameter_list[4], 3)
        filtered.append(data[ch])
    return np.array(filtered)
def end_test():
    """Ends the test and flushes the saved data in a file in same directory"""
    data = board.get_board_data()
    board.stop_stream()
    board.release_session()
    # timestamped file name, e.g. 01.02.2023_13.45.59_eeg_data.csv
    stamp = datetime.now().strftime("%d.%m.%Y_%H.%M.%S")
    DataFilter.write_file(data, stamp + "_eeg_data.csv", "w")
    print("\nTest ended.")
def wavelet_filter(self, data, parameter_list):
    """Wavelet-denoise the first ``parameter_list[-1]`` channels in place and return them stacked.

    parameter_list layout (presumably — verify against callers): wavelet name
    at [1], decomposition level at [2], channel count at [-1].
    """
    filtered = []
    for ch in range(parameter_list[-1]):
        DataFilter.perform_wavelet_denoising(data[ch], parameter_list[1], parameter_list[2])
        filtered.append(data[ch])
    return np.array(filtered)
def psd_welch(self, data, parameter_list):
    """Detrend each of the first ``parameter_list[-1]`` channels in place and return their Welch PSD amplitudes.

    Only the amplitude array (index 0 of get_psd_welch's result) is kept; the
    frequency axis is discarded. parameter_list carries the Welch arguments at
    indices [1]..[4] and the channel count at [-1].
    """
    features = []
    for ch in range(parameter_list[-1]):
        # NOTE(review): detrend(…, 1) then detrend(…, 2) runs constant and linear
        # detrending back to back; the linear pass likely subsumes the constant one.
        DataFilter.detrend(data[ch], 1)
        DataFilter.detrend(data[ch], 2)
        psd = DataFilter.get_psd_welch(data[ch], parameter_list[1], parameter_list[2],
                                       parameter_list[3], parameter_list[4])
        features.append(psd[0])
    return np.array(features)
def filtering(signal, sf, chosen_channels):
    """Band-limit the chosen channels: 50 Hz Chebyshev-I low-pass then 3 Hz Butterworth high-pass.

    Each channel is deep-copied, filtered in place (BrainFlow filters mutate
    their input), and written back into ``signal``, which is also returned.
    """
    for ch in chosen_channels:
        working = copy.deepcopy(signal[ch])
        DataFilter.perform_lowpass(working, sf, 50.0, 5, FilterTypes.CHEBYSHEV_TYPE_1.value, 1)
        DataFilter.perform_highpass(working, sf, 3.0, 4, FilterTypes.BUTTERWORTH.value, 0)
        signal[ch] = working
    return signal
def on_next(self, eeg_channels, nfft):
    """Grab ~1 s of recent samples and push per-channel Welch band powers to the UI components."""
    # get_current_board_data leaves samples in the buffer; we take ~1 sec of data ~10 times a sec
    window = max(self.sampling_rate, nfft) + 1
    data = self.board.get_current_board_data(window)
    for channel, components in self.channels.items():
        channel_data = data[channel]
        DataFilter.detrend(channel_data, DetrendOperations.LINEAR.value)
        psd = DataFilter.get_psd_welch(channel_data, nfft, nfft // 2,
                                       self.sampling_rate,
                                       WindowFunctions.BLACKMAN_HARRIS.value)
        for component in components:
            component.add_band_power_value(psd)
def main():
    """Demo: stream from a Cyton-based board, switch it to analog mode mid-stream, and dump the data."""
    parser = argparse.ArgumentParser()
    # use docs to check which parameters are required for specific board, e.g. for Cyton - set serial port
    parser.add_argument('--ip-port', type=int, help='ip port', required=False, default=0)
    parser.add_argument('--ip-protocol', type=int, help='ip protocol, check IpProtocolType enum',
                        required=False, default=0)
    parser.add_argument('--ip-address', type=str, help='ip address', required=False, default='')
    parser.add_argument('--serial-port', type=str, help='serial port', required=False, default='')
    parser.add_argument('--mac-address', type=str, help='mac address', required=False, default='')
    parser.add_argument('--other-info', type=str, help='other info', required=False, default='')
    parser.add_argument('--streamer-params', type=str, help='other info', required=False, default='')
    parser.add_argument('--board-id', type=int, help='board id, check docs to get a list of supported boards',
                        required=True)
    parser.add_argument('--log', action='store_true')
    args = parser.parse_args()

    params = BrainFlowInputParams()
    params.ip_port = args.ip_port
    params.serial_port = args.serial_port
    params.mac_address = args.mac_address
    params.other_info = args.other_info
    params.ip_address = args.ip_address
    params.ip_protocol = args.ip_protocol

    if (args.log):
        BoardShim.enable_dev_board_logger()
    else:
        BoardShim.disable_board_logger()

    board = BoardShim(args.board_id, params)
    board.prepare_session()
    board.start_stream()
    print('Session Started')
    # NOTE(review): indentation was lost in this file, so the exact extent of this
    # loop body is reconstructed — verify against the original example.
    for x in range(2):
        time.sleep(5)
        board.config_board('/2')  # enable analog mode only for Cyton Based Boards!
    time.sleep(5)
    data = board.get_board_data()
    board.stop_stream()
    board.release_session()

    """
    data[BoardShim.get_other_channels(args.board_id)[0]] contains cyton end byte
    data[BoardShim.get_other_channels(args.board_id)[1....]] contains unprocessed bytes
    if end byte is 0xC0 there are accel data in data[BoardShim.get_accel_channels(args.board_id)[....]] else there are zeros
    if end byte is 0xC1 there are analog data in data[BoardShim.get_analog_channels(args.board_id)[....]] else there are zeros
    """
    print(data[BoardShim.get_other_channels(args.board_id)[0]][0:5])   # should be standard end byte 0xC0
    print(data[BoardShim.get_other_channels(args.board_id)[0]][-5:])   # should be analog and byte 0xC1
    DataFilter.write_file(data, 'cyton_data_new.txt', 'w')
def main():
    """Demo: stream from any supported board (chosen via CLI) and round-trip the data through a CSV file."""
    parser = argparse.ArgumentParser()
    # use docs to check which parameters are required for specific board, e.g. for Cyton - set serial port
    parser.add_argument('--timeout', type=int, help='timeout for device discovery or connection',
                        required=False, default=0)
    parser.add_argument('--ip-port', type=int, help='ip port', required=False, default=0)
    parser.add_argument('--ip-protocol', type=int, help='ip protocol, check IpProtocolType enum',
                        required=False, default=0)
    parser.add_argument('--ip-address', type=str, help='ip address', required=False, default='')
    parser.add_argument('--serial-port', type=str, help='serial port', required=False, default='')
    parser.add_argument('--mac-address', type=str, help='mac address', required=False, default='')
    parser.add_argument('--other-info', type=str, help='other info', required=False, default='')
    parser.add_argument('--streamer-params', type=str, help='streamer params', required=False, default='')
    parser.add_argument('--serial-number', type=str, help='serial number', required=False, default='')
    parser.add_argument('--board-id', type=int, help='board id, check docs to get a list of supported boards',
                        required=True)
    parser.add_argument('--log', action='store_true')
    args = parser.parse_args()

    params = BrainFlowInputParams()
    params.ip_port = args.ip_port
    params.serial_port = args.serial_port
    params.mac_address = args.mac_address
    params.other_info = args.other_info
    params.serial_number = args.serial_number
    params.ip_address = args.ip_address
    params.ip_protocol = args.ip_protocol
    params.timeout = args.timeout

    BoardShim.enable_dev_board_logger()
    board = BoardShim(args.board_id, params)
    board.prepare_session()
    board.start_stream()
    BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
    time.sleep(10)
    # get 20 latest data points; does not remove them from the internal buffer
    data = board.get_current_board_data(20)
    board.stop_stream()
    board.release_session()

    # demo how to convert it to pandas DF and plot data
    df = pd.DataFrame(np.transpose(data))
    print('Data From the Board')
    print(df.head(10))

    # demo for data serialization using the brainflow API; recommended over pandas.to_csv()
    DataFilter.write_file(data, 'test.csv', 'w')  # use 'a' for append mode
    restored_data = DataFilter.read_file('test.csv')
    restored_df = pd.DataFrame(np.transpose(restored_data))
    print('Data From the File')
    print(restored_df.head(10))
def poll(self, sample_num):
    """Block until at least ``sample_num`` samples are buffered, then drain the board.

    The raw data is appended to a log file and returned as a DataFrame.

    Parameters:
        sample_num: minimum number of samples to wait for before draining.
    Returns:
        DataFrame produced by board_2_df from the transposed board data.
    """
    # Busy-wait (with a short sleep) until enough samples accumulate. The original
    # wrapped this loop in a try/except whose handler only re-raised the exception —
    # a no-op that has been removed.
    while self.board.get_board_data_count() < sample_num:
        time.sleep(0.02)
    board_data = self.board.get_board_data()
    # Raw string keeps the exact same Windows path while avoiding the invalid
    # '\D'/'\c' escape-sequence warnings of the original literal.
    # 'a' appends; 'w' overwrites. Could add a collision check (append 1, 2, ...)
    # or generate names from the date-time instead.
    DataFilter.write_file(board_data, r'.\Data\cyton_data_new.txt', 'a')
    return board_2_df(np.transpose(board_data))
def on_next(self, eeg_channels, nfft):
    """Sample ~1 s of data, update band powers for each protocol channel, and return the total positive-signal count."""
    time.sleep(.3)
    # get_current_board_data leaves samples in the buffer; ~1 sec of data ~3 times a sec
    data = self.board.get_current_board_data(max(self.sampling_rate, nfft) + 1)
    for channel in self.protocol:
        series = data[eeg_channels[channel.channel_inx]]
        DataFilter.detrend(series, DetrendOperations.LINEAR.value)
        psd = DataFilter.get_psd_welch(series, nfft, nfft // 2,
                                       self.sampling_rate,
                                       WindowFunctions.BLACKMAN_HARRIS.value)
        for band in channel.bands:
            band.add_band_power_value(psd, 30)
    return sum(ch.get_positive_signals_count() for ch in self.protocol)
def on_next(self, eeg_channels, nfft):
    """Sample ~1 s of data and return a dict of band name -> summed signal across protocol channels."""
    # get_current_board_data leaves samples in the buffer; ~1 sec of data ~10 times a sec
    data = self.board.get_current_board_data(max(self.sampling_rate, nfft) + 1)
    totals = collections.defaultdict(float)
    for channel in self.protocol:
        series = data[eeg_channels[channel.channel_inx]]
        DataFilter.detrend(series, DetrendOperations.LINEAR.value)
        psd = DataFilter.get_psd_welch(series, nfft, nfft // 2,
                                       self.sampling_rate,
                                       WindowFunctions.BLACKMAN_HARRIS.value)
        for band in channel.bands:
            band.add_band_power_value(psd)
            totals[band.name] += band.get_signal()
    return totals
def streamLoop():
    """Drain the board buffer and package raw EEG rows + timestamps (plus metadata on the first loop)."""
    # computed but never used by this function; kept to preserve original behavior
    rate = DataFilter.get_nearest_power_of_two(board.rate)
    frame = board.get_board_data()
    timestamps = frame[board.time_channel]
    eeg_rows = frame[board.eeg_channels]

    payload = {}
    payload['raw'] = [row.tolist() for row in eeg_rows]
    payload['times'] = timestamps.tolist()

    # Send Metadata on First Loop
    if loopCount == 0:
        payload['sps'] = board.rate
        payload['deviceType'] = 'eeg'
        payload['format'] = 'brainflow'
        payload['eegChannelTags'] = [
            {'ch': channel - 1, 'tag': board.eeg_names[i], 'analyze': True}
            for i, channel in enumerate(board.eeg_channels)
        ]
    return payload
def eeg_signals():
    """Fetch ~1 s of raw data, band-pass filter each EEG channel, and return PCA-transformed signals."""
    # Grab the raw data; the sampling rate gives 1 s worth of samples
    eeg_data = board.get_current_board_data(sampling_rate)[0:9]
    # Band-pass 0.5-50 Hz: center frequency 25.25, bandwidth 49.5
    # NOTE(review): board id 0 is hard-coded here while get_sampling_rate below
    # uses board id 2 — confirm both match the board actually in use.
    eeg_channels = BoardShim.get_eeg_channels(0)
    for count, channel in enumerate(eeg_channels):
        # remove the per-channel mean, then filter in place
        eeg_data[channel] = eeg_data[channel] - np.average(eeg_data[channel])
        DataFilter.perform_bandpass(eeg_data[channel], BoardShim.get_sampling_rate(2),
                                    25.25, 49.5, 3, FilterTypes.BESSEL.value, 0)
    # drop the first row, add a batch axis, and apply PCA as an unsupervised spatial filter
    batched = np.array([eeg_data[1:9]])
    pca = UnsupervisedSpatialFilter(PCA(8), average=False)
    return pca.fit_transform(batched)[0]
# Main playback loop: start the board session, launch the video and audio worker
# threads, then repeatedly poll on_next() for a signal value. A rolling window of
# the last 3 signals is kept; positive_signal is set when the newest signal
# exceeds the window average (or unconditionally when the newest value is above
# the 9 "min positive signals" threshold). Threads are joined when playback
# stops; any exception is printed and the method returns.
# NOTE(review): this definition was flattened by whitespace loss, so the exact
# loop/except boundaries (and whether the thread joins sit inside or after the
# while loop) cannot be recovered; the statement is left byte-identical.
def main(self): self.board.prepare_session() self.board.start_stream() try: nfft = DataFilter.get_nearest_power_of_two(self.sampling_rate) eeg_channels = BoardShim.get_eeg_channels(self.board_id) time.sleep(3) signals = [] cv2_thread = threading.Thread(target=self.cv2_video_read_thread) audio_thread = threading.Thread(target=self.audio_thread) cv2_thread.start() audio_thread.start() self.player_is_playing = True while self.player_is_playing: signals.append(self.on_next(eeg_channels, nfft)) if len(signals) > 3: signals.pop(0) avg_signal = sum(signals) / len(signals) self.positive_signal = avg_signal < signals[-1] self.last_signal_delta = abs(avg_signal - signals[-1]) if signals[-1] > 9: # min positive signals self.positive_signal = True print( f'up {self.last_signal_delta}' if avg_signal < signals[-1] else f'down {self.last_signal_delta}') # enable it later audio_thread.join() cv2_thread.join() except Exception as e: print(e) return
# Main playback loop with CSV metric logging: starts the board session and the
# video/audio worker threads, then polls on_next() every ~0.3 s. Per iteration it
# sums all metric values, keeps a rolling history of up to 50 sums, and flags
# positive_signal when the history average is below the current sum scaled by
# signal_freq_coeff (a "high" delta uses high_signal_freq_coeff). Each iteration
# appends a timestamped row of metrics and per-channel band powers to a
# log2-<time>.csv file. On any exception the error is printed and playback stops.
# NOTE(review): flattened by whitespace loss — whether data_log_file.close() and
# the thread joins are inside or after the while loop cannot be recovered here,
# so the statement is left byte-identical.
def main(self): self.board.prepare_session() self.board.start_stream() try: nfft = DataFilter.get_nearest_power_of_two(self.sampling_rate) eeg_channels = BoardShim.get_eeg_channels(self.board_id) time.sleep(3) cv2_thread = threading.Thread(target=self.cv2_video_thread) audio_thread = threading.Thread(target=self.audio_thread) cv2_thread.start() audio_thread.start() self.player_is_playing = True signal_freq_coeff = 1.1 # auto adjustable coefficient? high_signal_freq_coeff = 1.5 data_log_file = open(f'log2-{time.time()}.csv', 'a') print_bands = [] for channel in self.channels.keys(): print_bands.append(','.join( [b.name for b in self.channels[channel]])) data_log_file.write( f'time,metrics_sum,signal,high_signal,{",".join(print_bands)}') metrics_hist = [] while self.player_is_playing: time.sleep(.3) self.on_next(eeg_channels, nfft) metrics_sum = 0.0 for metric in self.metrics: metrics_sum += metric.get_metric() metrics_hist.append(metrics_sum) if len(metrics_hist) > 50: metrics_hist.pop(0) avg_metrics_hist = sum(metrics_hist) / len(metrics_hist) self.positive_signal = avg_metrics_hist < metrics_sum * signal_freq_coeff self.is_last_signal_delta_high = False if self.positive_signal and avg_metrics_hist < metrics_sum * high_signal_freq_coeff: self.is_last_signal_delta_high = True print( f'{self.positive_signal} {avg_metrics_hist} < {metrics_sum*signal_freq_coeff}' ) print_bands = [] for channel in self.channels.keys(): print_bands.append(','.join([ str(b.band_current_power) for b in self.channels[channel] ])) log_line = f'\n{time.asctime(time.gmtime(time.time()))},{metrics_sum},{self.positive_signal},{self.is_last_signal_delta_high},{",".join(print_bands)}' data_log_file.write(log_line) data_log_file.close() audio_thread.join() cv2_thread.join() except Exception as e: print(e) self.player_is_playing = False return
def add_band_power_value(self, psd, max_size):
    """Compute this band's power from ``psd``, record it, and cap the history at ``max_size`` entries."""
    power = DataFilter.get_band_power(psd, self.band_range_min, self.band_range_max)
    self.band_current_power = power
    self.power_values.append(power)
    # drop the oldest value once the history exceeds its cap
    if len(self.power_values) > max_size:
        self.power_values.pop(0)
def _load_session_data(self, subject_name, run):
    """Load one session's EEG data and event file for a single subject.

    The first 5 seconds of every session is a baseline used to let the signal
    settle, so those samples are stripped from the returned data.

    Parameters:
        subject_name: subject identifier used in the file names.
        run: run number within the session.
    Returns:
        (data, events): trimmed EEG array and the events DataFrame.
    """
    stem = subject_name + '_' + self.erp_type + '_' + str(run)
    data_path = os.path.join('data', stem + '.csv')
    event_path = os.path.join('data', stem + '_EVENTS.csv')

    data = DataFilter.read_file(data_path)
    # remove beginning 5 seconds where signal settles;
    # eeg_info[1] is presumably the sampling rate — verify against the class setup
    settle_samples = 5 * self.eeg_info[1]
    data = data[:, settle_samples:]

    events = pd.read_csv(event_path)
    return data, events
def plot(self):
    """Plot the buffered stream history of the first configured channel (3 Hz high-passed) and return it."""
    plt.figure()
    print('Only for channel #' + str(self.channels[0]))
    frame = self.board.get_current_board_data(num_samples=450000)
    t = frame[self.board.time_channel] - self.start_time
    trace = frame[self.board.eeg_channels][self.channels[0]]
    # in-place 4th-order Butterworth high-pass at 3 Hz
    DataFilter.perform_highpass(trace, self.board.rate, 3.0, 4,
                                FilterTypes.BUTTERWORTH.value, 0)
    plt.plot(t, trace)
    plt.title('OpenBCI Stream History')
    plt.ylabel('Voltage')
    plt.xlabel('Time (s)')
    plt.show()
    return trace
def prepare_data():
    """Build feature/label datasets from recorded 'relaxed'/'focused' CSV sessions.

    Slides several window sizes over each recording (a light form of data
    augmentation), extracts BrainFlow average band powers per window, pickles
    the results, and returns (dataset_x, dataset_y) with label 0 = relaxed,
    1 = focused. The board id is taken from each file's parent directory name.
    """
    # use different windows, it's kinda data augmentation
    window_sizes = [4.0, 6.0, 8.0, 10.0]
    overlaps = [0.5, 0.45, 0.4, 0.35]  # percentage of window_size
    dataset_x = []
    dataset_y = []
    for data_type in ('relaxed', 'focused'):
        for file in glob.glob(os.path.join('data', data_type, '*', '*.csv')):
            print(file)
            board_id = os.path.basename(os.path.dirname(file))
            try:
                board_id = int(board_id)
                data = DataFilter.read_file(file)
                sampling_rate = BoardShim.get_sampling_rate(board_id)
                eeg_channels = get_eeg_channels(board_id)
                for num, window_size in enumerate(window_sizes):
                    # skip a little more at the start for 'focused' recordings
                    if data_type == 'focused':
                        cur_pos = sampling_rate * 10
                    else:
                        cur_pos = sampling_rate * 3
                    window_len = int(window_size * sampling_rate)
                    step = int(window_size * overlaps[num] * sampling_rate)
                    while cur_pos + window_len < data.shape[1]:
                        data_in_window = data[:, cur_pos:cur_pos + window_len]
                        bands = DataFilter.get_avg_band_powers(data_in_window, eeg_channels,
                                                               sampling_rate, True)
                        dataset_x.append(np.concatenate((bands[0], bands[1])))
                        dataset_y.append(1 if data_type == 'focused' else 0)
                        cur_pos = cur_pos + step
            except Exception as e:
                print(str(e))

    print('Class 1: %d Class 0: %d' % (len([x for x in dataset_y if x == 1]),
                                       len([x for x in dataset_y if x == 0])))
    with open('dataset_x.pickle', 'wb') as f:
        pickle.dump(dataset_x, f, protocol=3)
    with open('dataset_y.pickle', 'wb') as f:
        pickle.dump(dataset_y, f, protocol=3)
    return dataset_x, dataset_y
def main():
    """Demo: wavelet transform/inverse and FFT/IFFT round-trips on synthetic-board EEG channels."""
    BoardShim.enable_dev_board_logger()

    # use synthetic board for demo
    params = BrainFlowInputParams()
    board_id = BoardIds.SYNTHETIC_BOARD.value
    sampling_rate = BoardShim.get_sampling_rate(board_id)
    board = BoardShim(board_id, params)
    board.prepare_session()
    board.start_stream()
    BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
    time.sleep(10)
    # the FFT demo below requires a power-of-two number of samples
    data = board.get_current_board_data(DataFilter.get_nearest_power_of_two(sampling_rate))
    board.stop_stream()
    board.release_session()

    eeg_channels = BoardShim.get_eeg_channels(board_id)
    # demo for transforms
    for count, channel in enumerate(eeg_channels):
        print('Original data for channel %d:' % channel)
        print(data[channel])

        # demo for wavelet transforms: wavelet_coeffs format is
        # [A(J) D(J) D(J-1) ..... D(1)] where J is the decomposition level,
        # A - approximation coeffs, D - detailed coeffs; the lengths array
        # stores the length of each block
        wavelet_coeffs, lengths = DataFilter.perform_wavelet_transform(data[channel], 'db5', 3)
        app_coefs = wavelet_coeffs[0:lengths[0]]
        detailed_coeffs_first_block = wavelet_coeffs[lengths[0]:lengths[1]]
        # you can work on the wavelet coeffs here — denoising, for example,
        # operates via thresholds on these coefficients
        restored_data = DataFilter.perform_inverse_wavelet_transform(
            (wavelet_coeffs, lengths), data[channel].shape[0], 'db5', 3)
        print('Restored data after wavelet transform for channel %d:' % channel)
        print(restored_data)

        # demo for fft: len of data must be a power of 2; len of fft_data is N / 2 + 1
        fft_data = DataFilter.perform_fft(data[channel], WindowFunctions.NO_WINDOW.value)
        restored_fft_data = DataFilter.perform_ifft(fft_data)
        print('Restored data after fft for channel %d:' % channel)
        print(restored_fft_data)
def main():
    """Continuously compute concentration and relaxation ML metrics from a BrainBit board.

    Runs until interrupted (Ctrl+C); the stream and session are always
    released on exit.
    """
    BoardShim.enable_board_logger()
    DataFilter.enable_data_logger()
    MLModel.enable_ml_logger()

    params = BrainFlowInputParams()
    board = BoardShim(BoardIds.BRAINBIT_BOARD.value, params)
    master_board_id = board.get_board_id()
    sampling_rate = BoardShim.get_sampling_rate(master_board_id)
    board.prepare_session()
    board.start_stream(45000, '')
    eeg_channels = BoardShim.get_eeg_channels(int(master_board_id))
    # BUG FIX: the original ran `while True` with no exit, so the trailing
    # stop_stream()/release_session() calls were unreachable dead code; run the
    # loop under try/finally so cleanup always executes, and treat Ctrl+C as a
    # normal shutdown request.
    try:
        while True:
            BoardShim.log_message(LogLevels.LEVEL_INFO.value, 'start sleeping in the main thread')
            # recommended window size for eeg metric calculation is at least 4 seconds, bigger is better
            time.sleep(5)
            data = board.get_board_data()
            bands = DataFilter.get_avg_band_powers(data, eeg_channels, sampling_rate, True)
            feature_vector = np.concatenate((bands[0], bands[1]))
            print(feature_vector)

            # calc concentration
            concentration_params = BrainFlowModelParams(
                BrainFlowMetrics.CONCENTRATION.value, BrainFlowClassifiers.KNN.value)
            concentration = MLModel(concentration_params)
            concentration.prepare()
            print('Concentration: %f' % concentration.predict(feature_vector))
            concentration.release()

            # calc relaxation
            relaxation_params = BrainFlowModelParams(
                BrainFlowMetrics.RELAXATION.value, BrainFlowClassifiers.REGRESSION.value)
            relaxation = MLModel(relaxation_params)
            relaxation.prepare()
            print('Relaxation: %f' % relaxation.predict(feature_vector))
            relaxation.release()
    except KeyboardInterrupt:
        pass
    finally:
        board.stop_stream()
        board.release_session()
def main():
    """Demo: print library versions and exercise the basic data-retrieval calls on a synthetic board."""
    print('BoardShim version: ' + BoardShim.get_version())
    print('DataFilter version: ' + DataFilter.get_version())
    print('MLModel version: ' + MLModel.get_version())

    BoardShim.enable_dev_board_logger()
    board = BoardShim(BoardIds.SYNTHETIC_BOARD.value, BrainFlowInputParams())
    board.prepare_session()
    board.start_stream()
    time.sleep(10)
    board.stop_stream()

    # drain the buffer and report the stddev of row 2
    snapshot = board.get_board_data()
    print(DataFilter.calc_stddev(snapshot[2]))
    # second drain — likely empty since the stream is stopped and the buffer was just drained
    snapshot = board.get_board_data()
    print(snapshot)
    # peek at the 10 newest samples without draining
    snapshot = board.get_current_board_data(10)
    print(snapshot)
    board.release_session()