def load_file(file_name, standardize, demean):
    """Load a BBCI EEG set and run the offline preprocessing chain.

    Steps: load sorted EEG sensors, reject trials exceeding an absolute
    amplitude of 800, zero out channel Cz, resample to 250 Hz, apply a
    common average reference, then optionally exponentially standardize
    and/or demean the continuous signal.

    Parameters
    ----------
    file_name : str
        Path to the BBCI .mat file.
    standardize : bool
        If True, apply exponential standardization.
    demean : bool
        If True, apply exponential demeaning.

    Returns
    -------
    Preprocessed continuous dataset.
    """
    dataset = BBCIDataset(file_name,
                          load_sensor_names=get_EEG_sensors_sorted())
    log.info("Loading...")
    cnt = dataset.load()
    log.info("Set cz to zero and remove high absolute value trials")
    # Classes 1..4 each get their own marker definition keyed by "1".."4".
    marker_def = dict([(str(i_class), [i_class])
                       for i_class in xrange(1, 5)])
    cleaner = MaxAbsCleaner(threshold=800, marker_def=marker_def,
                            segment_ival=[0, 4000])
    clean_result = cleaner.clean(cnt)
    cnt = restrict_cnt(cnt, marker_def.values(),
                       clean_result.clean_trials,
                       clean_result.rejected_chan_names,
                       copy_data=False)
    cnt = set_channel_to_zero(cnt, 'Cz')
    log.info("Resampling...")
    cnt = resample_cnt(cnt, newfs=250.0)
    log.info("Car filtering...")
    cnt = common_average_reference_cnt(cnt)
    if standardize:
        log.info("Standardizing...")
        cnt = exponential_standardize_cnt(cnt)
    if demean:
        log.info("Demeaning...")
        cnt = exponential_demean_cnt(cnt)
    return cnt
# NOTE(review): this is an exact duplicate of the earlier load_file
# definition in this file; being defined later, it silently shadows the
# first one. One of the two should be removed — confirm which is intended.
def load_file(file_name, standardize, demean):
    """Load and preprocess a BBCI EEG file.

    Cleans high-amplitude trials (|x| > 800), zeroes Cz, resamples to
    250 Hz, applies common average referencing, and optionally performs
    exponential standardization and/or demeaning.

    :param file_name: path to the BBCI .mat file
    :param standardize: apply exponential standardization if True
    :param demean: apply exponential demeaning if True
    :return: the preprocessed continuous dataset
    """
    bbci_set = BBCIDataset(
        file_name, load_sensor_names=get_EEG_sensors_sorted())
    log.info("Loading...")
    cnt = bbci_set.load()
    log.info("Set cz to zero and remove high absolute value trials")
    # One marker entry per class, "1"..."4" -> [1]...[4].
    marker_def = dict([(str(i_class), [i_class]) for i_class in xrange(1, 5)])
    clean_result = MaxAbsCleaner(
        threshold=800,
        marker_def=marker_def,
        segment_ival=[0, 4000]).clean(cnt)
    cnt = restrict_cnt(
        cnt,
        marker_def.values(),
        clean_result.clean_trials,
        clean_result.rejected_chan_names,
        copy_data=False)
    cnt = set_channel_to_zero(cnt, 'Cz')
    log.info("Resampling...")
    cnt = resample_cnt(cnt, newfs=250.0)
    log.info("Car filtering...")
    cnt = common_average_reference_cnt(cnt)
    if standardize:
        log.info("Standardizing...")
        cnt = exponential_standardize_cnt(cnt)
    if demean:
        log.info("Demeaning...")
        cnt = exponential_demean_cnt(cnt)
    return cnt
def send_file_data():
    """Replay a preprocessed offline EEG recording over TCP as live data.

    Loads an experiment config to obtain its cnt preprocessors (all but
    the final exponential standardization, which the online coordinator
    applies itself), preprocesses the offline file, then streams the
    signal plus a marker row in blocks of 50 samples to 127.0.0.1:1234,
    pausing 10 ms between blocks.

    Returns
    -------
    The preprocessed continuous dataset that was streamed.
    """
    print("Loading Experiment...")
    # Use model to get cnt preprocessors
    base_name = 'data/models/online/cnt/shallow-uneven-trials/9'
    exp = create_experiment(base_name + '.yaml')
    print("Loading File...")
    offline_execution_set = BBCIDataset(
        'data/four-sec-dry-32-sensors/cabin/'
        'MaVo2_sahara32_realMovementS001R02_ds10_1-5.BBCI.mat')
    cnt = offline_execution_set.load()
    print("Running preprocessings...")
    cnt_preprocs = exp.dataset.cnt_preprocessors
    assert cnt_preprocs[-1][0].__name__ == 'exponential_standardize_cnt'
    # Do not do standardizing as it will be done by coordinator
    for preproc, kwargs in cnt_preprocs[:-1]:
        cnt = preproc(cnt, **kwargs)
    cnt_data = cnt.data.astype(np.float32)
    assert not np.any(np.isnan(cnt_data))
    # np.isinf already covers -inf, so the isneginf check is redundant
    # but harmless; kept to avoid changing the sanity checks.
    assert not np.any(np.isinf(cnt_data))
    assert not np.any(np.isneginf(cnt_data))
    print("max possible block", np.ceil(len(cnt_data) / 50.0))
    y_labels = create_y_labels(cnt).astype(np.float32)
    assert np.array_equal(np.unique(y_labels), range(5)), ("Should only have "
                                                           "labels 0-4")
    print("Done.")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(("127.0.0.1", 1234))
    chan_names = ['Fp1', 'Fpz', 'Fp2', 'AF7', 'AF3', 'AFz', 'AF4', 'AF8',
                  'F5', 'F3', 'F1', 'Fz', 'F2', 'F4', 'F6',
                  'FC1', 'FCz', 'FC2', 'C3', 'C1', 'Cz', 'C2', 'C4',
                  'CP3', 'CP1', 'CPz', 'CP2', 'CP4', 'P1', 'Pz', 'P2',
                  'POz', 'marker']
    # Header: space-separated channel names, then chan/sample counts
    # as little 4-byte ints.
    chan_line = " ".join(chan_names) + "\n"
    s.send(chan_line)
    n_chans = 33
    n_samples = 50
    s.send(np.array([n_chans], dtype=np.int32).tobytes())
    s.send(np.array([n_samples], dtype=np.int32).tobytes())
    print("Sending data...")
    i_block = 0  # if setting i_block to sth higher, printed results will be incorrect
    max_stop_block = np.ceil(len(cnt_data) / float(n_samples))
    stop_block = 800
    assert stop_block < max_stop_block
    while i_block < stop_block:
        arr = cnt_data[
            i_block * n_samples:i_block * n_samples + n_samples, :].T
        this_y = y_labels[i_block * n_samples:i_block * n_samples + n_samples]
        # chan x time, marker appended as the last row
        arr = np.concatenate((arr, this_y[np.newaxis, :]),
                             axis=0).astype(np.float32)
        # Fix: validate the block shape BEFORE sending; previously the
        # assert ran only after the bytes were already on the socket.
        assert arr.shape == (n_chans, n_samples)
        s.send(arr.tobytes(order='F'))
        i_block += 1
        gevent.sleep(0.01)
    print("Done.")
    return cnt
# NOTE(review): this redefines send_file_data from earlier in the file and
# silently shadows it — confirm which variant is meant to be active.
def send_file_data():
    """Replay a robot-hall cursor recording over TCP as live data.

    Loads the experiment config for its setup, preprocesses the offline
    file (resample to 100 Hz, 40 Hz lowpass), derives per-sample class
    labels from start/end markers, and streams signal plus a marker row
    in blocks of 50 samples to 127.0.0.1:7987, pausing 30 ms per block.

    Returns
    -------
    The preprocessed continuous dataset that was streamed.
    """
    print("Loading Experiment...")
    # Use model to get cnt preprocessors
    base_name = 'data/models/online/cnt/start-end-mrk/125'
    exp = create_experiment(base_name + '.yaml')
    print("Loading File...")
    offline_execution_set = BBCIDataset(
        'data/robot-hall/NiRiNBD15_cursor_250Hz.BBCI.mat',
        load_sensor_names=get_nico_sensors())
    cnt = offline_execution_set.load()
    log.info("Preprocessing...")
    cnt = resample_cnt(cnt, newfs=100)
    cnt = lowpass_cnt(cnt, high_cut_off_hz=40, filt_order=10)
    log.info("Done.")
    cnt_data = cnt.data.astype(np.float32)
    assert not np.any(np.isnan(cnt_data))
    # np.isinf already covers -inf; redundant isneginf kept unchanged.
    assert not np.any(np.isinf(cnt_data))
    assert not np.any(np.isneginf(cnt_data))
    print("max possible block", np.ceil(len(cnt_data) / 50.0))
    # NOTE(review): segment_ival=(500, 0) has start > end — looks
    # suspicious; confirm against MarkerSegmenter's expected convention.
    segmenter = MarkerSegmenter(
        segment_ival=(500, 0),
        marker_def={
            'Right Hand': [1],
            'Feet': [2],
            'Rotation': [3],
            'Words': [4],
            'Rest': [5]
        },
        trial_classes=['Right Hand', 'Feet', 'Rotation', 'Words', 'Rest'],
        end_marker_def={
            'Right Hand': [10],
            'Feet': [20],
            'Rotation': [30],
            'Words': [40],
            'Rest': [50],
        },
    )
    cnt_y, class_names = segmenter.segment(cnt)
    # Collapse the one-hot marker matrix to per-sample labels:
    # 0 = no marker, 1..5 = class index + 1.
    has_marker = np.sum(cnt_y, axis=1) > 0
    new_y = np.zeros(cnt_y.shape[0], dtype=np.int32)
    new_y[has_marker] = (np.argmax(cnt_y[has_marker], axis=1) + 1)
    print("Done.")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(("127.0.0.1", 7987))
    chan_names = [
        'Fp1', 'Fpz', 'Fp2', 'AF7', 'AF3', 'AF4', 'AF8', 'F7', 'F5', 'F3',
        'F1', 'Fz', 'F2', 'F4', 'F6', 'F8', 'FT7', 'FC5', 'FC3', 'FC1',
        'FCz', 'FC2', 'FC4', 'FC6', 'FT8', 'M1', 'T7', 'C5', 'C3', 'C1',
        'Cz', 'C2', 'C4', 'C6', 'T8', 'M2', 'TP7', 'CP5', 'CP3', 'CP1',
        'CPz', 'CP2', 'CP4', 'CP6', 'TP8', 'P7', 'P5', 'P3', 'P1', 'Pz',
        'P2', 'P4', 'P6', 'P8', 'PO7', 'PO5', 'PO3', 'POz', 'PO4', 'PO6',
        'PO8', 'O1', 'Oz', 'O2', 'marker'
    ]
    # Header: space-separated channel names, then chan/sample counts
    # as little 4-byte ints.
    chan_line = " ".join(chan_names) + "\n"
    s.send(chan_line)
    n_chans = 65
    n_samples = 50
    s.send(np.array([n_chans], dtype=np.int32).tobytes())
    s.send(np.array([n_samples], dtype=np.int32).tobytes())
    print("Sending data...")
    i_block = 0  # if setting i_block to sth higher, printed results will be incorrect
    max_stop_block = np.ceil(len(cnt_data) / float(n_samples))
    stop_block = 800
    assert stop_block < max_stop_block
    while i_block < stop_block:
        arr = cnt_data[
            i_block * n_samples:i_block * n_samples + n_samples, :].T
        this_y = new_y[i_block * n_samples:i_block * n_samples + n_samples]
        # chan x time, marker appended as the last row
        arr = np.concatenate((arr, this_y[np.newaxis, :]),
                             axis=0).astype(np.float32)
        # Fix: validate the block shape BEFORE sending; previously the
        # assert ran only after the bytes were already on the socket.
        assert arr.shape == (n_chans, n_samples)
        s.send(arr.tobytes(order='F'))
        i_block += 1
        gevent.sleep(0.03)
    print("Done.")
    return cnt