Example #1
def load_file(file_name, standardize, demean):
    bbci_set = BBCIDataset(file_name,
                           load_sensor_names=get_EEG_sensors_sorted())
    log.info("Loading...")
    cnt = bbci_set.load()
    log.info("Set cz to zero and remove high absolute value trials")
    marker_def = dict([(str(i_class), [i_class]) for i_class in xrange(1, 5)])
    clean_result = MaxAbsCleaner(threshold=800,
                                 marker_def=marker_def,
                                 segment_ival=[0, 4000]).clean(cnt)

    cnt = restrict_cnt(cnt,
                       marker_def.values(),
                       clean_result.clean_trials,
                       clean_result.rejected_chan_names,
                       copy_data=False)
    cnt = set_channel_to_zero(cnt, 'Cz')

    log.info("Resampling...")
    cnt = resample_cnt(cnt, newfs=250.0)

    log.info("Car filtering...")
    cnt = common_average_reference_cnt(cnt)
    if standardize:
        log.info("Standardizing...")
        cnt = exponential_standardize_cnt(cnt)
    if demean:
        log.info("Demeaning...")
        cnt = exponential_demean_cnt(cnt)

    return cnt
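A minimal call sketch for load_file (hedged: the path below is a placeholder, and log is assumed to be a module-level logger configured elsewhere):

# Hypothetical usage; the .BBCI.mat path is illustrative only.
cnt = load_file('data/subject1.BBCI.mat', standardize=True, demean=False)
print(cnt.data.shape)  # axis 0 is time, axis 1 is channels (as sliced in later examples)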
Example #2
def send_file_data():
    print("Loading Experiment...")
    # Use model to get cnt preprocessors
    base_name = 'data/models/online/cnt/shallow-uneven-trials/9'
    exp = create_experiment(base_name + '.yaml')

    print("Loading File...")
    offline_execution_set = BBCIDataset(
        'data/four-sec-dry-32-sensors/cabin/'
        'MaVo2_sahara32_realMovementS001R02_ds10_1-5.BBCI.mat')
    cnt = offline_execution_set.load()
    print("Running preprocessings...")
    cnt_preprocs = exp.dataset.cnt_preprocessors
    assert cnt_preprocs[-1][0].__name__ == 'exponential_standardize_cnt'
    # Do not do standardizing as it will be done by coordinator
    for preproc, kwargs in cnt_preprocs[:-1]:
        cnt = preproc(cnt, **kwargs)
    cnt_data = cnt.data.astype(np.float32)
    assert not np.any(np.isnan(cnt_data))
    assert not np.any(np.isinf(cnt_data))  # np.isinf covers both +inf and -inf
    print("max possible block", np.ceil(len(cnt_data) / 50.0))
    y_labels = create_y_labels(cnt).astype(np.float32)
    assert np.array_equal(np.unique(y_labels), range(5)), ("Should only have "
        "labels 0-4")
    print("Done.")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(("127.0.0.1", 1234))
    
    chan_names = ['Fp1', 'Fpz', 'Fp2', 'AF7', 'AF3',
                  'AFz', 'AF4', 'AF8', 'F5', 'F3', 'F1', 'Fz', 'F2', 'F4', 'F6',
                  'FC1', 'FCz', 'FC2', 'C3', 'C1', 'Cz', 'C2', 'C4', 'CP3', 'CP1',
                  'CPz', 'CP2', 'CP4', 'P1', 'Pz', 'P2', 'POz', 'marker']
    
    chan_line = " ".join(chan_names) + "\n"
    s.sendall(chan_line.encode('ascii'))  # sockets take bytes; sendall writes fully
    n_chans = 33
    n_samples = 50
    s.sendall(np.array([n_chans], dtype=np.int32).tobytes())
    s.sendall(np.array([n_samples], dtype=np.int32).tobytes())
    print("Sending data...")
    i_block = 0  # if i_block starts higher, the printed results will be incorrect
    max_stop_block = np.ceil(len(cnt_data) / float(n_samples))
    stop_block = 800
    assert stop_block < max_stop_block
    while i_block < stop_block:
        arr = cnt_data[i_block * n_samples:i_block * n_samples + n_samples, :].T
        this_y = y_labels[i_block * n_samples:i_block * n_samples + n_samples]
        # chan x time
        arr = np.concatenate((arr, this_y[np.newaxis, :]), axis=0).astype(np.float32)
        assert arr.shape == (n_chans, n_samples)
        s.sendall(arr.tobytes(order='F'))
        i_block += 1
        gevent.sleep(0.01)
    print("Done.")
    return cnt
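The sender above defines a small ad-hoc wire format: one newline-terminated line of space-separated channel names, two native-endian int32 values (n_chans, n_samples), then repeated float32 blocks of shape (n_chans, n_samples) serialized in Fortran (column-major) order. A minimal receiver sketch under those assumptions (error handling and endianness negotiation are intentionally omitted):

import socket

import numpy as np

def recv_exact(sock, n):
    # recv() may return fewer bytes than requested, so loop until n bytes arrive.
    data = b""
    while len(data) < n:
        chunk = sock.recv(n - len(data))
        if not chunk:
            raise EOFError("socket closed before full read")
        data += chunk
    return data

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(("127.0.0.1", 1234))
srv.listen(1)
conn, _ = srv.accept()

# Header line: space-separated channel names, terminated by "\n".
header = b""
while not header.endswith(b"\n"):
    header += recv_exact(conn, 1)
chan_names = header.decode("ascii").split()

n_chans = int(np.frombuffer(recv_exact(conn, 4), dtype=np.int32)[0])
n_samples = int(np.frombuffer(recv_exact(conn, 4), dtype=np.int32)[0])

# One data block: float32, shape (n_chans, n_samples), column-major as sent above.
raw = recv_exact(conn, n_chans * n_samples * 4)
block = np.frombuffer(raw, dtype=np.float32).reshape((n_chans, n_samples), order='F')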
Example #3
def run(only_create_exp, cleaner, min_freq, max_freq, last_low_freq, low_width,
        high_width, low_overlap, high_overlap, filename, restricted_n_trials,
        n_folds, segment_ival, save_base_name, orig_params):
    starttime = time.time()
    marker_def = {'1': [1], '2': [2], '4': [4]}

    load_sensor_names = None  # e.g. ['2LA', '4Z', '5R']; None means all sensors

    set_loader = BBCIDataset(filename, load_sensor_names=load_sensor_names)

    sensor_names_after_cleaning = None  # None means all
    csp_experiment = CSPExperiment(
        set_loader,
        sensor_names=sensor_names_after_cleaning,
        cleaner=cleaner,
        resample_fs=250,
        min_freq=min_freq,
        max_freq=max_freq,
        last_low_freq=last_low_freq,
        low_width=low_width,
        high_width=high_width,
        low_overlap=low_overlap,
        high_overlap=high_overlap,
        filt_order=4,
        segment_ival=segment_ival,  # trial interval
        standardize_filt_cnt=False,
        standardize_epo=False,  # standardize the epoched data?
        n_folds=n_folds,
        n_top_bottom_csp_filters=5,  # twice this number is the number of CSP filters per filterband before feature selection
        n_selected_filterbands=None,  # how many filterbands to select?
        n_selected_features=20,  # how many features to select in the feature selection?
        forward_steps=2,  # feature selection param
        backward_steps=1,  # feature selection param
        stop_when_no_improvement=False,  # feature selection param
        only_last_fold=True,  # split into n_folds but only run the last fold (i.e., last fold as test fold)?
        restricted_n_trials=restricted_n_trials,  # restrict to a certain number of _clean_ trials?
        common_average_reference=False,
        ival_optimizer=None,  # optimize the trial ival with some optimizer?
        shuffle=False,  # shuffle or do blockwise folds?
        marker_def=marker_def)
    result_file_name = save_base_name + '.result.pkl'
    csp_experiment.run()
    endtime = time.time()
    result = CSPResult(csp_trainer=csp_experiment,
                       parameters=orig_params,
                       training_time=endtime - starttime)
    with open(result_file_name, 'wb') as resultfile:  # binary mode for pickle
        log.info("Saving to {:s}...\n".format(result_file_name))
        pickle.dump(result, resultfile)
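A short companion sketch for reading the result back; note the binary mode, matching the 'wb' write above:

import pickle

with open(save_base_name + '.result.pkl', 'rb') as resultfile:
    result = pickle.load(resultfile)
print(result.training_time)  # seconds spent in csp_experiment.run()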
Example #4
def construct_folder_combined_set(folder_names, sensor_names, cnt_preprocessors,
                                  marker_def, end_marker_def, trial_classes,
                                  trial_start_offset_ms, trial_stop_offset_ms,
                                  min_break_length_ms, max_break_length_ms,
                                  break_start_offset_ms, break_stop_offset_ms,
                                  last_set_split_trial, add_trial_breaks=True,
                                  folder_name_to_extra_args=None):
    """folder_name_to_extra_args should be a dict folder_name -> args that
    differ from the defaults for that folder."""
    default_args = deepcopy(locals())
    sets = []

    if folder_name_to_extra_args is not None:
        for folder_name_with_args in folder_name_to_extra_args:
            assert folder_name_with_args in folder_names

    for i_folder, folder_name in enumerate(folder_names):
        this_args = copy(default_args)
        if folder_name_to_extra_args is not None and (
                folder_name in folder_name_to_extra_args):
            for key in folder_name_to_extra_args[folder_name]:
                assert key in this_args
                this_args[key] = folder_name_to_extra_args[folder_name][key]
                assert key != 'last_set_split_trial', "Does not make sense :)"
        marker_segmenter = MarkerSegmenter(
            segment_ival=[this_args['trial_start_offset_ms'],
                          this_args['trial_stop_offset_ms']],
            marker_def=this_args['marker_def'],
            trial_classes=this_args['trial_classes'],
            end_marker_def=this_args['end_marker_def'])
        trial_break_adder = AddTrialBreaks(
            min_length_ms=this_args['min_break_length_ms'],
            max_length_ms=this_args['max_break_length_ms'],
            start_offset_ms=this_args['break_start_offset_ms'],
            stop_offset_ms=this_args['break_stop_offset_ms'],
            start_marker_def=this_args['marker_def'],
            end_marker_def=this_args['end_marker_def'])
        if (i_folder < len(folder_names) - 1) or (
                this_args['last_set_split_trial'] is None):
            segmenters = [marker_segmenter]
        else:
            segmenters = [marker_segmenter,
                          RestrictTrialRange(0, this_args['last_set_split_trial'])]
        if this_args['add_trial_breaks']:
            segmenters.append(trial_break_adder)
        segmenter = PipelineSegmenter(segmenters)
        filenames = sorted(glob(os.path.join(folder_name, '*.BBCI.mat')))
        cnt_set = SetWithMarkers(
            MultipleBBCIDataset(filenames,
                                load_sensor_names=this_args['sensor_names']),
            this_args['cnt_preprocessors'],
            segmenter)
        sets.append(cnt_set)

    # add the last part of the last set as test set if you split the last set;
    # this relies on this_args, marker_segmenter etc. still holding the values
    # from the last loop iteration
    if last_set_split_trial is not None:
        segmenters = [marker_segmenter,
                      RestrictTrialRange(last_set_split_trial, None)]
        if this_args['add_trial_breaks']:
            segmenters.append(trial_break_adder)
        segmenter = PipelineSegmenter(segmenters)
        cnt_set = SetWithMarkers(
            BBCIDataset(filenames[-1],  # again the last file is needed
                        load_sensor_names=this_args['sensor_names']),
            this_args['cnt_preprocessors'],
            segmenter)
        sets.append(cnt_set)
    dataset = CombinedSet(sets)
    return dataset
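A hedged call sketch for the builder above; every folder path, offset, and marker code is made up for illustration:

dataset = construct_folder_combined_set(
    folder_names=['data/session1', 'data/session2'],  # hypothetical folders
    sensor_names=None,               # None means load all sensors
    cnt_preprocessors=[],            # e.g. [(resample_cnt, {'newfs': 250.0})]
    marker_def={'Right': [1], 'Feet': [2]},
    end_marker_def={'Right': [10], 'Feet': [20]},
    trial_classes=['Right', 'Feet'],
    trial_start_offset_ms=0,
    trial_stop_offset_ms=4000,
    min_break_length_ms=1000,
    max_break_length_ms=10000,
    break_start_offset_ms=500,
    break_stop_offset_ms=-500,
    last_set_split_trial=None,       # or a trial index to hold out the tail of the last set
    add_trial_breaks=True)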
Example #5
def send_file_data():
    print("Loading Experiment...")
    # Use model to get cnt preprocessors
    base_name = 'data/models/online/cnt/start-end-mrk/125'
    exp = create_experiment(base_name + '.yaml')

    print("Loading File...")
    offline_execution_set = BBCIDataset(
        'data/robot-hall/NiRiNBD15_cursor_250Hz.BBCI.mat',
        load_sensor_names=get_nico_sensors())
    cnt = offline_execution_set.load()
    log.info("Preprocessing...")
    cnt = resample_cnt(cnt, newfs=100)
    cnt = lowpass_cnt(cnt, high_cut_off_hz=40, filt_order=10)
    log.info("Done.")
    cnt_data = cnt.data.astype(np.float32)
    assert not np.any(np.isnan(cnt_data))
    assert not np.any(np.isinf(cnt_data))  # np.isinf covers both +inf and -inf
    print("max possible block", np.ceil(len(cnt_data) / 50.0))
    segmenter = MarkerSegmenter(
        segment_ival=(500, 0),
        marker_def={
            'Right Hand': [1],
            'Feet': [2],
            'Rotation': [3],
            'Words': [4],
            'Rest': [5]
        },
        trial_classes=['Right Hand', 'Feet', 'Rotation', 'Words', 'Rest'],
        end_marker_def={
            'Right Hand': [10],
            'Feet': [20],
            'Rotation': [30],
            'Words': [40],
            'Rest': [50],
        },
    )
    cnt_y, class_names = segmenter.segment(cnt)
    has_marker = np.sum(cnt_y, axis=1) > 0
    new_y = np.zeros(cnt_y.shape[0], dtype=np.int32)
    new_y[has_marker] = (np.argmax(cnt_y[has_marker], axis=1) + 1)
    print("Done.")
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(("127.0.0.1", 7987))

    chan_names = [
        'Fp1', 'Fpz', 'Fp2', 'AF7', 'AF3', 'AF4', 'AF8', 'F7', 'F5', 'F3',
        'F1', 'Fz', 'F2', 'F4', 'F6', 'F8', 'FT7', 'FC5', 'FC3', 'FC1', 'FCz',
        'FC2', 'FC4', 'FC6', 'FT8', 'M1', 'T7', 'C5', 'C3', 'C1', 'Cz', 'C2',
        'C4', 'C6', 'T8', 'M2', 'TP7', 'CP5', 'CP3', 'CP1', 'CPz', 'CP2',
        'CP4', 'CP6', 'TP8', 'P7', 'P5', 'P3', 'P1', 'Pz', 'P2', 'P4', 'P6',
        'P8', 'PO7', 'PO5', 'PO3', 'POz', 'PO4', 'PO6', 'PO8', 'O1', 'Oz',
        'O2', 'marker'
    ]

    chan_line = " ".join(chan_names) + "\n"
    s.sendall(chan_line.encode('ascii'))  # sockets take bytes; sendall writes fully
    n_chans = 65
    n_samples = 50
    s.sendall(np.array([n_chans], dtype=np.int32).tobytes())
    s.sendall(np.array([n_samples], dtype=np.int32).tobytes())
    print("Sending data...")
    i_block = 0  # if i_block starts higher, the printed results will be incorrect
    max_stop_block = np.ceil(len(cnt_data) / float(n_samples))
    stop_block = 800
    assert stop_block < max_stop_block
    while i_block < stop_block:
        arr = cnt_data[i_block * n_samples:i_block * n_samples +
                       n_samples, :].T
        this_y = new_y[i_block * n_samples:i_block * n_samples + n_samples]
        # chan x time
        arr = np.concatenate((arr, this_y[np.newaxis, :]),
                             axis=0).astype(np.float32)
        assert arr.shape == (n_chans, n_samples)
        s.sendall(arr.tobytes(order='F'))
        i_block += 1
        gevent.sleep(0.03)
    print("Done.")
    return cnt
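The one-hot-to-dense label conversion after segmenter.segment can be checked in isolation; a toy example with made-up values:

import numpy as np

# Rows: one sample with no marker, one of class 1, one of class 2.
cnt_y = np.array([[0, 0], [1, 0], [0, 1]])
has_marker = np.sum(cnt_y, axis=1) > 0
new_y = np.zeros(cnt_y.shape[0], dtype=np.int32)
new_y[has_marker] = np.argmax(cnt_y[has_marker], axis=1) + 1
print(new_y)  # [0 1 2]: 0 = no marker, 1..n = class index + 1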