Example #1
0
def load_file(file_name, standardize, demean):
    """Load a BBCI EEG recording and run the offline preprocessing chain.

    Steps: load sensors in sorted order, reject high-absolute-value
    trials, zero the Cz channel, resample to 250 Hz, apply a common
    average reference, and optionally standardize/demean exponentially.

    Parameters
    ----------
    file_name : str
        Path to the BBCI ``.mat`` recording.
    standardize : bool
        If True, exponentially standardize the continuous signal.
    demean : bool
        If True, exponentially demean the continuous signal.

    Returns
    -------
    cnt
        The cleaned, resampled, re-referenced continuous data set.
    """
    bbci_set = BBCIDataset(file_name,
                           load_sensor_names=get_EEG_sensors_sorted())
    log.info("Loading...")
    cnt = bbci_set.load()
    log.info("Set cz to zero and remove high absolute value trials")
    # Markers 1-4 are the four classes; keys are their string forms.
    marker_def = {str(i_class): [i_class] for i_class in xrange(1, 5)}
    clean_result = MaxAbsCleaner(threshold=800,
                                 marker_def=marker_def,
                                 segment_ival=[0, 4000]).clean(cnt)

    cnt = restrict_cnt(cnt, marker_def.values(), clean_result.clean_trials,
                       clean_result.rejected_chan_names, copy_data=False)
    cnt = set_channel_to_zero(cnt, 'Cz')

    log.info("Resampling...")
    cnt = resample_cnt(cnt, newfs=250.0)

    log.info("Car filtering...")
    cnt = common_average_reference_cnt(cnt)
    if standardize:
        log.info("Standardizing...")
        cnt = exponential_standardize_cnt(cnt)
    if demean:
        log.info("Demeaning...")
        cnt = exponential_demean_cnt(cnt)

    return cnt
Example #2
0
def load_file(file_name, standardize, demean):
    """Load one BBCI EEG recording and preprocess it.

    The chain is: trial cleaning by max absolute value, Cz zeroing,
    resampling to 250 Hz, common average referencing, and optional
    exponential standardization / demeaning of the continuous signal.

    :param file_name: path to the BBCI ``.mat`` file
    :param standardize: apply exponential standardization when True
    :param demean: apply exponential demeaning when True
    :return: the preprocessed continuous data set
    """
    dataset = BBCIDataset(file_name,
                          load_sensor_names=get_EEG_sensors_sorted())
    log.info("Loading...")
    cnt = dataset.load()
    log.info("Set cz to zero and remove high absolute value trials")
    # One marker class per integer code 1..4, keyed by its string form.
    marker_def = dict([(str(code), [code]) for code in xrange(1, 5)])
    cleaning = MaxAbsCleaner(threshold=800,
                             marker_def=marker_def,
                             segment_ival=[0, 4000]).clean(cnt)

    cnt = restrict_cnt(cnt,
                       marker_def.values(),
                       cleaning.clean_trials,
                       cleaning.rejected_chan_names,
                       copy_data=False)
    cnt = set_channel_to_zero(cnt, 'Cz')

    log.info("Resampling...")
    cnt = resample_cnt(cnt, newfs=250.0)

    log.info("Car filtering...")
    cnt = common_average_reference_cnt(cnt)
    if standardize:
        log.info("Standardizing...")
        cnt = exponential_standardize_cnt(cnt)
    if demean:
        log.info("Demeaning...")
        cnt = exponential_demean_cnt(cnt)

    return cnt
Example #3
0
 def load(self):
     """Load every wrapped set and append them into one continuous set.

     When the sets were recorded at different sampling rates, data is
     always resampled *down* to the lower rate before appending, so the
     returned set has a single common sampling frequency.
     """
     merged = self.set_loaders[0].load()
     for set_loader in self.set_loaders[1:]:
         new_cnt = set_loader.load()
         # always sample down to lowest common denominator
         if new_cnt.fs > merged.fs:
             log.warn("Next set has larger sampling rate ({:d}) "
                      "than before ({:d}), resampling next set".format(
                          new_cnt.fs, merged.fs))
             new_cnt = resample_cnt(new_cnt, merged.fs)
         if new_cnt.fs < merged.fs:
             log.warn("Next set has smaller sampling rate ({:d}) "
                      "than before ({:d}), resampling set so far".format(
                          new_cnt.fs, merged.fs))
             merged = resample_cnt(merged, new_cnt.fs)
         merged = append_cnt(merged, new_cnt)
     return merged
Example #4
0
 def preprocess_test_set(self):
     """Apply the configured preprocessing steps to ``self.test_cnt``.

     Each step runs only if the corresponding config attribute is set:
     sensor selection (after topological sorting), Cz zeroing,
     resampling, common average referencing, and exponential
     standardization. Mutates ``self.test_cnt`` (and possibly
     ``self.sensor_names``) in place; returns nothing.
     """
     if self.sensor_names is not None:
         # Sort sensors topologically first, then restrict channels.
         self.sensor_names = sort_topologically(self.sensor_names)
         self.test_cnt = select_channels(self.test_cnt, self.sensor_names)
     if self.set_cz_to_zero is True:
         self.test_cnt = set_channel_to_zero(self.test_cnt, 'Cz')
     if self.resample_fs is not None:
         self.test_cnt = resample_cnt(self.test_cnt, newfs=self.resample_fs)
     if self.common_average_reference is True:
         self.test_cnt = common_average_reference_cnt(self.test_cnt)
     if self.standardize_cnt is True:
         self.test_cnt = exponential_standardize_cnt(self.test_cnt)
Example #5
0
 def preprocess_test_set(self):
     """Run the configured preprocessing chain on ``self.test_cnt``.

     Steps are gated by config attributes and applied in a fixed order:
     channel selection, Cz zeroing, resampling, common average
     reference, exponential standardization. All updates happen in
     place on ``self``; there is no return value.
     """
     if self.sensor_names is not None:
         # Topological sort defines the channel order used for selection.
         self.sensor_names = sort_topologically(self.sensor_names)
         self.test_cnt = select_channels(self.test_cnt, self.sensor_names)
     if self.set_cz_to_zero is True:
         self.test_cnt = set_channel_to_zero(self.test_cnt, 'Cz')
     if self.resample_fs is not None:
         self.test_cnt = resample_cnt(self.test_cnt, newfs=self.resample_fs)
     if self.common_average_reference is True:
         self.test_cnt = common_average_reference_cnt(self.test_cnt)
     if self.standardize_cnt is True:
         self.test_cnt = exponential_standardize_cnt(self.test_cnt)
Example #6
0
    def preprocess_set(self):
        """Apply the configured preprocessing chain to ``self.cnt`` in place.

        Order: drop rejected channels, select configured sensors, zero
        Cz, resample, common average reference, exponential
        standardization — each step gated by its config attribute.
        """
        # only remove rejected channels now so that clean function can
        # be called multiple times without changing cleaning results
        self.cnt = select_channels(self.cnt, self.rejected_chan_names,
            invert=True)
        if self.sensor_names is not None:
            # Note this does not respect order of sensor names,
            # it selects chans from given sensor names
            # but keeps original order
            self.cnt = select_channels(self.cnt, self.sensor_names)

        if self.set_cz_to_zero is True:
            self.cnt = set_channel_to_zero(self.cnt, 'Cz')
        if self.resample_fs is not None:
            self.cnt = resample_cnt(self.cnt, newfs=self.resample_fs)
        if self.common_average_reference is True:
            self.cnt = common_average_reference_cnt(self.cnt)
        if self.standardize_cnt is True:
            self.cnt = exponential_standardize_cnt(self.cnt)
Example #7
0
    def preprocess_set(self):
        """Preprocess ``self.cnt`` in place according to the configuration.

        Runs, in order and each only when configured: rejected-channel
        removal, sensor selection, Cz zeroing, resampling, common
        average referencing, exponential standardization.
        """
        # only remove rejected channels now so that clean function can
        # be called multiple times without changing cleaning results
        self.cnt = select_channels(self.cnt,
                                   self.rejected_chan_names,
                                   invert=True)
        if self.sensor_names is not None:
            # Note this does not respect order of sensor names,
            # it selects chans from given sensor names
            # but keeps original order
            self.cnt = select_channels(self.cnt, self.sensor_names)

        if self.set_cz_to_zero is True:
            self.cnt = set_channel_to_zero(self.cnt, 'Cz')
        if self.resample_fs is not None:
            self.cnt = resample_cnt(self.cnt, newfs=self.resample_fs)
        if self.common_average_reference is True:
            self.cnt = common_average_reference_cnt(self.cnt)
        if self.standardize_cnt is True:
            self.cnt = exponential_standardize_cnt(self.cnt)
Example #8
0
def send_file_data():
    """Stream a prerecorded BBCI file to a local receiver over TCP.

    Loads and preprocesses a recording (resample to 100 Hz, 40 Hz
    lowpass), derives a per-sample class-marker channel from the trial
    segmentation, then sends the data to 127.0.0.1:7987 in blocks of
    50 samples: first a space-separated channel-name line, then the
    channel/sample counts as int32, then float32 blocks of shape
    (n_chans, n_samples) in Fortran (column-major) order.

    Returns the preprocessed continuous data set.
    """
    print("Loading Experiment...")
    # Use model to get cnt preprocessors
    base_name = 'data/models/online/cnt/start-end-mrk/125'
    # NOTE(review): `exp` is never used below — presumably
    # create_experiment is kept for its side effects; confirm.
    exp = create_experiment(base_name + '.yaml')

    print("Loading File...")
    offline_execution_set = BBCIDataset(
        'data/robot-hall/NiRiNBD15_cursor_250Hz.BBCI.mat',
        load_sensor_names=get_nico_sensors())
    cnt = offline_execution_set.load()
    log.info("Preprocessing...")
    cnt = resample_cnt(cnt, newfs=100)
    cnt = lowpass_cnt(cnt, high_cut_off_hz=40, filt_order=10)
    log.info("Done.")
    cnt_data = cnt.data.astype(np.float32)
    # Sanity checks: the stream must contain only finite values.
    assert not np.any(np.isnan(cnt_data))
    assert not np.any(np.isinf(cnt_data))
    # NOTE(review): isinf already covers negative infinity; this extra
    # check is redundant but harmless.
    assert not np.any(np.isneginf(cnt_data))
    print("max possible block", np.ceil(len(cnt_data) / 50.0))
    # Map marker codes to class names; end markers are code * 10.
    segmenter = MarkerSegmenter(
        segment_ival=(500, 0),
        marker_def={
            'Right Hand': [1],
            'Feet': [2],
            'Rotation': [3],
            'Words': [4],
            'Rest': [5]
        },
        trial_classes=['Right Hand', 'Feet', 'Rotation', 'Words', 'Rest'],
        end_marker_def={
            'Right Hand': [10],
            'Feet': [20],
            'Rotation': [30],
            'Words': [40],
            'Rest': [50],
        },
    )
    cnt_y, class_names = segmenter.segment(cnt)
    # Collapse the one-hot per-sample labels into a single int channel:
    # 0 = no marker, 1..5 = class index + 1.
    has_marker = np.sum(cnt_y, axis=1) > 0
    new_y = np.zeros(cnt_y.shape[0], dtype=np.int32)
    new_y[has_marker] = (np.argmax(cnt_y[has_marker], axis=1) + 1)
    print("Done.")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(("127.0.0.1", 7987))

    chan_names = [
        'Fp1', 'Fpz', 'Fp2', 'AF7', 'AF3', 'AF4', 'AF8', 'F7', 'F5', 'F3',
        'F1', 'Fz', 'F2', 'F4', 'F6', 'F8', 'FT7', 'FC5', 'FC3', 'FC1', 'FCz',
        'FC2', 'FC4', 'FC6', 'FT8', 'M1', 'T7', 'C5', 'C3', 'C1', 'Cz', 'C2',
        'C4', 'C6', 'T8', 'M2', 'TP7', 'CP5', 'CP3', 'CP1', 'CPz', 'CP2',
        'CP4', 'CP6', 'TP8', 'P7', 'P5', 'P3', 'P1', 'Pz', 'P2', 'P4', 'P6',
        'P8', 'PO7', 'PO5', 'PO3', 'POz', 'PO4', 'PO6', 'PO8', 'O1', 'Oz',
        'O2', 'marker'
    ]

    # Header: channel names (incl. the synthetic 'marker' channel),
    # then channel and block-size counts as raw int32.
    # NOTE(review): sending a str works on Python 2 only; Python 3
    # sockets require bytes — confirm target interpreter.
    chan_line = " ".join(chan_names) + "\n"
    s.send(chan_line)
    n_chans = 65
    n_samples = 50
    s.send(np.array([n_chans], dtype=np.int32).tobytes())
    s.send(np.array([n_samples], dtype=np.int32).tobytes())
    print("Sending data...")
    i_block = 0  # if setting i_block to sth higher, printed results will be incorrect
    max_stop_block = np.ceil(len(cnt_data) / float(n_samples))
    stop_block = 800
    assert stop_block < max_stop_block
    while i_block < stop_block:
        # Slice one block of samples and transpose to chan x time.
        arr = cnt_data[i_block * n_samples:i_block * n_samples +
                       n_samples, :].T
        this_y = new_y[i_block * n_samples:i_block * n_samples + n_samples]
        # chan x time
        arr = np.concatenate((arr, this_y[np.newaxis, :]),
                             axis=0).astype(np.float32)
        s.send(arr.tobytes(order='F'))
        assert arr.shape == (n_chans, n_samples)
        i_block += 1
        # Pace the stream roughly like a live acquisition.
        gevent.sleep(0.03)
    print("Done.")
    return cnt