def create_dataset(data_folder, subject_id, train_start_ms, low_cut_hz,
                   filt_order, clean_train):
    """Build the combined cleaned train/test dataset for one BCI IV 2a subject.

    Parameters
    ----------
    data_folder : str
        Folder containing the ``A{subject:02d}T.mat`` / ``A{subject:02d}E.mat``
        files.
    subject_id : int
        Subject number used to format the file names.
    train_start_ms : int
        Start (ms) of the segment interval; the end is fixed at 4000 ms.
    low_cut_hz : float
        Lower cutoff for the bandpass filter (upper cutoff fixed at 38 Hz).
    filt_order : int
        Filter order for the bandpass filter.
    clean_train : bool
        If True, apply artefact-mask cleaning to the training set as well;
        otherwise the training set is left uncleaned.

    Returns
    -------
    CombinedCleanedSet wrapping the train and test ``CntSignalMatrix`` sets.
    """
    sensor_names_to_load = None  # load all sensors

    train_path = os.path.join(data_folder, 'A{:02d}T.mat'.format(subject_id))
    test_path = os.path.join(data_folder, 'A{:02d}E.mat'.format(subject_id))

    # Preprocessing pipeline as a list of (function, kwargs) pairs.
    preprocessors = [
        (resample_cnt, dict(newfs=250.0)),
        (bandpass_cnt, dict(low_cut_hz=low_cut_hz, high_cut_hz=38,
                            filt_order=filt_order)),
        (exponential_standardize_cnt, dict()),
    ]

    marker_def = {'1- Right Hand': [1], '2 - Left Hand': [2], '3 - Rest': [3],
                  '4 - Feet': [4]}

    def build_set(filepath):
        # Segment interval in ms; these are the samples that will be
        # predicted, so for a network with a 2000 ms receptive field,
        # train_start_ms=1500 means the first receptive field spans
        # -500 to 1500 ms.  A fresh list per set avoids shared state.
        loader = BCICompetition4Set2A(
            filepath, load_sensor_names=sensor_names_to_load)
        processor = SignalProcessor(set_loader=loader,
                                    segment_ival=[train_start_ms, 4000],
                                    cnt_preprocessors=preprocessors,
                                    marker_def=marker_def)
        return CntSignalMatrix(signal_processor=processor, sensor_names='all')

    train_set = build_set(train_path)
    test_set = build_set(test_path)

    # The test set is always artefact-cleaned; the train set only on request.
    if clean_train:
        train_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
            marker_def=marker_def)
    else:
        train_cleaner = NoCleaner()
    test_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(marker_def=marker_def)
    return CombinedCleanedSet(train_set, test_set, train_cleaner, test_cleaner)
Ejemplo n.º 2
0
 def construct_sets(self):
     """Build one CntSignalMatrix per entry of ``self.set_args``.

     Each entry of ``self.set_args`` is a 5-tuple
     ``(filename, constructor, start_stop, segment_ival, end_marker_def)``.
     The string ``'bbci'`` for ``constructor`` is shorthand for
     ``BBCIDataset``.  When ``start_stop`` is given, additional cnt
     preprocessors are appended that restrict the data to the relevant
     marker classes and to the requested epoch range.  The constructed
     sets are collected in ``self.sets``.
     """
     self.sets = []
     for set_arg in self.set_args:
         #Thinkabout: maybe do additional preprocs before?
         (filename, constructor, start_stop, 
             segment_ival, end_marker_def) = set_arg
         # 'bbci' is a shorthand for the BBCIDataset loader class.
         if constructor == 'bbci':
             constructor = BBCIDataset
         loader= constructor(filename, 
             load_sensor_names=self.load_sensor_names)
         additional_cnt_preprocs = []
         if start_stop is not None:
             start, stop = start_stop
             # The epoch-range logic below indexes markers per class and
             # therefore relies on each class having exactly one label.
             assert np.all([len(labels) == 1 for labels in 
                 self.marker_def.values()]), (
                 "Expect only one label per class, otherwise rewrite...")
     
             # Collect all marker codes to keep: the class markers, plus
             # the end markers if trials are delimited by start/end pairs.
             classes = sorted([labels[0]
                 for labels in self.marker_def.values()])
             if end_marker_def is not None:
                 end_classes = sorted(
                     [labels[0]
                         for labels in end_marker_def.values()])
                 classes.extend(end_classes)
             select_class = [select_marker_classes,
                 dict(classes=classes, copy_data=False)]
             additional_cnt_preprocs.append(select_class)
             if end_marker_def is not None:
                 # since there are start and end markers,
                 # need to multiply indices by 2
                 if start is not None:
                     start = start * 2
                 if stop is not None:
                     stop = stop * 2
             select_epochs = [select_marker_epoch_range,
                 dict(start=start, stop=stop)]
                 
             additional_cnt_preprocs.append(select_epochs)
             
             # Finally restrict the continuous signal to the ival around
             # the remaining markers.
             select_ival = [select_ival_with_markers,
                  dict(segment_ival=segment_ival)]
             additional_cnt_preprocs.append(select_ival)
             
         
         # Shared preprocessors first, then the per-set additions above.
         this_cnt_preprocs = (list(self.cnt_preprocessors) +
             additional_cnt_preprocs)
         signal_proc= SignalProcessor(
             set_loader=loader,
             segment_ival=segment_ival,
             cnt_preprocessors=this_cnt_preprocs,
             marker_def=self.marker_def)
         this_set = CntSignalMatrix(signal_processor=signal_proc,
             sensor_names=self.sensor_names,
             end_marker_def = end_marker_def)
         self.sets.append(this_set)
Ejemplo n.º 3
0
def check_as_sets(train_file_name, test_file_name, combined_file_name):
    """Sanity-check that a combined set file equals its train + test parts.

    Loads the train, test and combined .mat files, epochs them, and verifies
    that the first 288 epochs of the combined set match the train set and the
    remaining epochs match the test set.  NaNs were replaced by means during
    loading, so a small fraction (< 1%) of mismatching values is tolerated.
    """
    train_set = BCICompetition4Set2A(train_file_name)
    train_wyrm_set = SignalProcessor(train_set)
    train_wyrm_set.load()

    test_set = BCICompetition4Set2A(test_file_name)
    # The evaluation file carries only the "unknown class" marker code.
    test_wyrm_set = SignalProcessor(test_set,
                                    marker_def={'Unknown': [-2147483648]})
    test_wyrm_set.load()

    combined_set = BCICompetition4Set2A(combined_file_name)
    combined_wyrm_set = SignalProcessor(combined_set)
    combined_wyrm_set.load()
    # nans were made to be means, so ignore that some values are not equal
    train_epo = train_wyrm_set.epo.data
    test_epo = test_wyrm_set.epo.data
    combined_epo = combined_wyrm_set.epo.data
    train_part = combined_epo[:288]
    assert (np.sum(train_epo - train_part != 0) /
            float(np.prod(train_epo.shape))) < 1e-2
    test_part = combined_epo[288:]
    # Count mismatching entries, matching the train check above.
    # (Was: a signed sum of differences, which can cancel to ~0 and pass
    # even for badly differing data.)
    assert (np.sum(test_epo - test_part != 0) /
            float(np.prod(test_epo.shape))) < 1e-2
    log.info("Set ok")
Ejemplo n.º 4
0
def compute_cleaner(data,
                    eog_data,
                    marker_positions,
                    ival,
                    max_min=2,
                    whisker_percent=5,
                    whisker_length=3):
    """For Cleaner tests: wrap raw arrays in wyrm Data and run a Cleaner.

    Parameters
    ----------
    data : 2d array (time x channels)
        EEG values.
    eog_data : 2d array (time x channels)
        EOG values; must have the same number of time samples as ``data``.
    marker_positions : sequence
        Positions at which class-0 markers are placed.
    ival : sequence of two numbers
        Interval used for both blink rejection and variance rejection.
    max_min, whisker_percent, whisker_length :
        Rejection thresholds passed through to the Cleaner.

    Returns
    -------
    Cleaner
        The cleaner after ``clean()`` has run.
    """
    assert eog_data.shape[0] == data.shape[0]

    axes = [range(data.shape[0]), range(data.shape[1])]
    # Materialize the marker list: on Python 3, zip() returns a one-shot
    # iterator, and the same markers are attached to both cnt and eog_cnt
    # below, so the second consumer would otherwise see it exhausted.
    markers = list(zip(marker_positions, [0] * len(marker_positions)))
    marker_def = {'0': [0]}
    cnt = Data(data, axes=axes, names=['time', 'channels'], units=['ms', '#'])
    cnt.fs = 1000
    cnt.markers = markers

    eog_axes = [range(eog_data.shape[0]), range(eog_data.shape[1])]
    eog_cnt = Data(eog_data,
                   axes=eog_axes,
                   names=['time', 'channels'],
                   units=['ms', '#'])
    eog_cnt.fs = 1000
    eog_cnt.markers = markers
    eog_proc = SignalProcessor(FakeLoader(eog_cnt),
                               segment_ival=ival,
                               marker_def=marker_def)

    # Bandpass arguments are None: cleaning here runs on unfiltered data.
    cleaner = Cleaner(cnt,
                      eog_proc,
                      rejection_blink_ival=ival,
                      max_min=max_min,
                      rejection_var_ival=ival,
                      whisker_percent=whisker_percent,
                      whisker_length=whisker_length,
                      low_cut_hz=None,
                      high_cut_hz=None,
                      filt_order=None,
                      marker_def=marker_def)
    cleaner.clean()
    return cleaner
Ejemplo n.º 5
0
    def __init__(self,
                 eog_set,
                 rejection_var_ival=None,
                 rejection_blink_ival=None,
                 max_min=600,
                 whisker_percent=10,
                 whisker_length=3,
                 marker_def=None,
                 low_cut_hz=0.1,
                 high_cut_hz=None):
        """Store cleaning parameters and wrap the EOG set in a SignalProcessor.

        ``rejection_var_ival`` defaults to [0, 4000] and
        ``rejection_blink_ival`` to [-500, 4000]; None sentinels are used
        instead of mutable list defaults so instances never share (and can
        never mutate) the same default list objects.
        """
        if rejection_var_ival is None:
            rejection_var_ival = [0, 4000]
        if rejection_blink_ival is None:
            rejection_blink_ival = [-500, 4000]
        # Copy all constructor arguments onto the instance in one go.
        local_vars = locals()
        del local_vars['self']
        self.__dict__.update(local_vars)

        if self.marker_def is None:
            self.marker_def = {
                '1 - Right Hand': [1],
                '2 - Left Hand': [2],
                '3 - Rest': [3],
                '4 - Feet': [4]
            }
        # Replace the raw loader with a processor segmented on the blink ival.
        self.eog_set = SignalProcessor(set_loader=self.eog_set,
                                       segment_ival=rejection_blink_ival,
                                       marker_def=self.marker_def)
Ejemplo n.º 6
0
def run(
    ex,
    data_folder,
    subject_id,
    n_chans,
    only_return_exp,
):
    """Train a Deep5Net on one BCI Competition IV 2a subject.

    Parameters
    ----------
    ex :
        Sacred-style experiment object; results are written to ``ex.info``
        and saved as pickle artifacts.
    data_folder : str
        Folder containing ``A{subject:02d}T.mat`` / ``A{subject:02d}E.mat``.
    subject_id : int
        Subject number used to format the file names.
    n_chans : int or None
        Number of input channels; must be given when ``only_return_exp`` is
        True, since then the data is not loaded and the channel count cannot
        be read from the dataset.
    only_return_exp : bool
        If True, build and return the Experiment without loading data or
        training.

    Returns
    -------
    Experiment or None
        The unstarted Experiment if ``only_return_exp``; otherwise None
        (results go into ``ex.info``).
    """
    start_time = time.time()
    assert (only_return_exp is False) or (n_chans is not None)
    ex.info['finished'] = False
    load_sensor_names = None
    train_filename = 'A{:02d}T.mat'.format(subject_id)
    test_filename = 'A{:02d}E.mat'.format(subject_id)
    train_filepath = os.path.join(data_folder, train_filename)
    test_filepath = os.path.join(data_folder, test_filename)

    # trial ival in milliseconds
    # these are the samples that will be predicted, so for a
    # network with 2000ms receptive field
    # 1500 means the first receptive field goes from -500 to 1500
    segment_ival = [1500, 4000]

    train_loader = BCICompetition4Set2A(train_filepath,
                                        load_sensor_names=load_sensor_names)
    test_loader = BCICompetition4Set2A(test_filepath,
                                       load_sensor_names=load_sensor_names)

    # Preprocessing pipeline in [(function, {args:values})] logic
    cnt_preprocessors = [(resample_cnt, {
        'newfs': 250.0
    }), (bandpass_cnt, {
        'low_cut_hz': 0,
        'high_cut_hz': 38,
    }), (exponential_standardize_cnt, {})]

    marker_def = {
        '1- Right Hand': [1],
        '2 - Left Hand': [2],
        '3 - Rest': [3],
        '4 - Feet': [4]
    }

    train_signal_proc = SignalProcessor(set_loader=train_loader,
                                        segment_ival=segment_ival,
                                        cnt_preprocessors=cnt_preprocessors,
                                        marker_def=marker_def)
    train_set = CntSignalMatrix(signal_processor=train_signal_proc,
                                sensor_names='all')

    test_signal_proc = SignalProcessor(set_loader=test_loader,
                                       segment_ival=segment_ival,
                                       cnt_preprocessors=cnt_preprocessors,
                                       marker_def=marker_def)
    test_set = CntSignalMatrix(signal_processor=test_signal_proc,
                               sensor_names='all')

    from braindecode.mywyrm.clean import MaxAbsCleaner
    train_cleaner = MaxAbsCleaner(segment_ival=[0, 4000],
                                  threshold=800,
                                  marker_def=marker_def)
    test_cleaner = MaxAbsCleaner(segment_ival=[0, 4000],
                                 threshold=800,
                                 marker_def=marker_def)
    combined_set = CombinedCleanedSet(train_set, test_set, train_cleaner,
                                      test_cleaner)
    if not only_return_exp:
        combined_set.load()
        in_chans = train_set.get_topological_view().shape[1]
    else:
        # Data was not loaded, so the channel count cannot be read from the
        # dataset; fall back to the caller-supplied n_chans (asserted above).
        # Previously this read the (unloaded) train set unconditionally and
        # left n_chans unused.
        in_chans = n_chans

    input_time_length = 1000  # implies how many crops are processed in parallel, does _not_ determine receptive field size
    # receptive field size is determined by model architecture
    num_filters_time = 25
    filter_time_length = 10
    num_filters_spat = 25
    pool_time_length = 3
    pool_time_stride = 3
    num_filters_2 = 50
    filter_length_2 = 10
    num_filters_3 = 100
    filter_length_3 = 10
    num_filters_4 = 200
    filter_length_4 = 10
    final_dense_length = 2
    n_classes = 4
    final_nonlin = softmax
    first_nonlin = elu
    first_pool_mode = 'max'
    first_pool_nonlin = identity
    later_nonlin = elu
    later_pool_mode = 'max'
    later_pool_nonlin = identity
    drop_in_prob = 0.0
    drop_prob = 0.5
    batch_norm_alpha = 0.1
    double_time_convs = False
    split_first_layer = True
    batch_norm = True
    # ensure reproducibility by resetting lasagne/theano random generator
    lasagne.random.set_rng(RandomState(34734))

    d5net = Deep5Net(in_chans=in_chans,
                     input_time_length=input_time_length,
                     num_filters_time=num_filters_time,
                     filter_time_length=filter_time_length,
                     num_filters_spat=num_filters_spat,
                     pool_time_length=pool_time_length,
                     pool_time_stride=pool_time_stride,
                     num_filters_2=num_filters_2,
                     filter_length_2=filter_length_2,
                     num_filters_3=num_filters_3,
                     filter_length_3=filter_length_3,
                     num_filters_4=num_filters_4,
                     filter_length_4=filter_length_4,
                     final_dense_length=final_dense_length,
                     n_classes=n_classes,
                     final_nonlin=final_nonlin,
                     first_nonlin=first_nonlin,
                     first_pool_mode=first_pool_mode,
                     first_pool_nonlin=first_pool_nonlin,
                     later_nonlin=later_nonlin,
                     later_pool_mode=later_pool_mode,
                     later_pool_nonlin=later_pool_nonlin,
                     drop_in_prob=drop_in_prob,
                     drop_prob=drop_prob,
                     batch_norm_alpha=batch_norm_alpha,
                     double_time_convs=double_time_convs,
                     split_first_layer=split_first_layer,
                     batch_norm=batch_norm)
    final_layer = d5net.get_layers()[-1]
    print_layers(final_layer)

    dataset_splitter = SeveralSetsSplitter(valid_set_fraction=0.2,
                                           use_test_as_valid=False)
    iterator = CntWindowTrialIterator(
        batch_size=45,
        input_time_length=input_time_length,
        n_sample_preds=get_n_sample_preds(final_layer))

    monitors = [
        LossMonitor(),
        CntTrialMisclassMonitor(input_time_length=input_time_length),
        RuntimeMonitor()
    ]

    #debug: n_no_decrease_max_epochs = 2
    #debug: n_max_epochs = 4
    n_no_decrease_max_epochs = 80
    n_max_epochs = 800  #100
    # real values for paper were 80 and 800
    stop_criterion = Or([
        NoDecrease('valid_misclass', num_epochs=n_no_decrease_max_epochs),
        MaxEpochs(num_epochs=n_max_epochs)
    ])

    dataset = combined_set
    splitter = dataset_splitter
    loss_expression = categorical_crossentropy
    updates_expression = adam
    updates_modifier = MaxNormConstraintWithDefaults({})
    remember_best_chan = 'valid_misclass'
    run_after_early_stop = True
    exp = Experiment(final_layer,
                     dataset,
                     splitter,
                     None,
                     iterator,
                     loss_expression,
                     updates_expression,
                     updates_modifier,
                     monitors,
                     stop_criterion,
                     remember_best_chan,
                     run_after_early_stop,
                     batch_modifier=None)

    if only_return_exp:
        return exp

    exp.setup()
    exp.run()
    end_time = time.time()
    run_time = end_time - start_time

    ex.info['finished'] = True
    ex.info['runtime'] = run_time
    # Record the final value of every monitored channel.
    for key in exp.monitor_chans:
        ex.info[key] = exp.monitor_chans[key][-1]
    save_pkl_artifact(ex, exp.monitor_chans, 'monitor_chans.pkl')
Ejemplo n.º 7
0
def run(ex, data_folder, subject_id, n_chans, train_inds, test_inds,
        sets_like_fbcsp_paper, clean_train, stop_chan, filt_order, low_cut_hz,
        loss_expression, network, only_return_exp, run_after_early_stop):
    """Train a deep or shallow ConvNet on one BCI Competition IV 2b subject.

    Parameters
    ----------
    ex :
        Sacred-style experiment object; results go into ``ex.info`` and are
        saved as pickle artifacts.
    data_folder : str
        Folder with the 2b session files.
    subject_id : int
        Subject number.
    n_chans : int or None
        Number of input channels; must be given when ``only_return_exp`` is
        True, since then the data is not loaded.
    train_inds, test_inds : list of int
        Session ids used for train/test; ``train_inds`` is overridden when
        ``sets_like_fbcsp_paper`` is True.
    sets_like_fbcsp_paper : bool
        Reproduce the per-subject session selection of the FBCSP paper.
    clean_train : bool
        If True, artefact-mask-clean the training set too.
    stop_chan : str
        Monitor channel (without 'valid_' prefix) used for early stopping.
    filt_order, low_cut_hz :
        Bandpass filter parameters (high cutoff fixed at 38 Hz).
    loss_expression :
        Loss passed to the Experiment.
    network : str
        Either 'deep' or 'shallow'.
    only_return_exp : bool
        If True, build and return the Experiment without loading/training.
    run_after_early_stop : bool
        Whether to continue training on train+valid after early stop.

    Returns
    -------
    Experiment or None
        The unstarted Experiment if ``only_return_exp``; otherwise None.
    """
    start_time = time.time()
    assert (only_return_exp is False) or (n_chans is not None)
    ex.info['finished'] = False

    # trial ival in milliseconds
    # these are the samples that will be predicted, so for a
    # network with 2000ms receptive field
    # 1500 means the first receptive field goes from -500 to 1500
    train_segment_ival = [1500, 4000]
    test_segment_ival = [0, 4000]

    if sets_like_fbcsp_paper:
        # Per-subject training-session choice used in the FBCSP paper.
        if subject_id in [4, 5, 6, 7, 8, 9]:
            train_inds = [3]
        elif subject_id == 1:
            train_inds = [1, 3]
        else:
            assert subject_id in [2, 3]
            train_inds = [1, 2, 3]

    train_loader = MultipleBCICompetition4Set2B(subject_id,
                                                session_ids=train_inds,
                                                data_folder=data_folder)

    test_loader = MultipleBCICompetition4Set2B(subject_id,
                                               session_ids=test_inds,
                                               data_folder=data_folder)

    # Preprocessing pipeline in [(function, {args:values})] logic
    cnt_preprocessors = [(resample_cnt, {
        'newfs': 250.0
    }),
                         (bandpass_cnt, {
                             'low_cut_hz': low_cut_hz,
                             'high_cut_hz': 38,
                             'filt_order': filt_order,
                         }), (exponential_standardize_cnt, {})]

    marker_def = {'1- Left Hand': [1], '2 - Right Hand': [2]}

    train_signal_proc = SignalProcessor(set_loader=train_loader,
                                        segment_ival=train_segment_ival,
                                        cnt_preprocessors=cnt_preprocessors,
                                        marker_def=marker_def)
    train_set = CntSignalMatrix(signal_processor=train_signal_proc,
                                sensor_names='all')

    test_signal_proc = SignalProcessor(set_loader=test_loader,
                                       segment_ival=test_segment_ival,
                                       cnt_preprocessors=cnt_preprocessors,
                                       marker_def=marker_def)
    test_set = CntSignalMatrix(signal_processor=test_signal_proc,
                               sensor_names='all')

    if clean_train:
        train_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
            marker_def=marker_def)
    else:
        train_cleaner = NoCleaner()
    test_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
        marker_def=marker_def)
    combined_set = CombinedCleanedSet(train_set, test_set, train_cleaner,
                                      test_cleaner)
    if not only_return_exp:
        combined_set.load()
        in_chans = train_set.get_topological_view().shape[1]
    else:
        # Data was not loaded, so the channel count cannot be read from the
        # dataset; fall back to the caller-supplied n_chans (asserted above).
        # Previously this read the (unloaded) train set unconditionally and
        # left n_chans unused.
        in_chans = n_chans

    lasagne.random.set_rng(RandomState(34734))
    input_time_length = 1000  # implies how many crops are processed in parallel, does _not_ determine receptive field size
    # receptive field size is determined by model architecture

    if network == 'deep':
        final_layer = create_deep_net(in_chans, input_time_length)
    else:
        assert network == 'shallow'
        final_layer = create_shallow_net(in_chans, input_time_length)

    dataset_splitter = SeveralSetsSplitter(valid_set_fraction=0.2,
                                           use_test_as_valid=False)
    iterator = CntWindowTrialIterator(
        batch_size=45,
        input_time_length=input_time_length,
        n_sample_preds=get_n_sample_preds(final_layer))

    monitors = [
        LossMonitor(),
        CntTrialMisclassMonitor(input_time_length=input_time_length),
        KappaMonitor(input_time_length=iterator.input_time_length, mode='max'),
        RuntimeMonitor()
    ]

    #debug: n_no_decrease_max_epochs = 2
    #debug: n_max_epochs = 4
    n_no_decrease_max_epochs = 80
    n_max_epochs = 800  #100
    # real values for paper were 80 and 800
    remember_best_chan = 'valid_' + stop_chan
    stop_criterion = Or([
        NoDecrease(remember_best_chan, num_epochs=n_no_decrease_max_epochs),
        MaxEpochs(num_epochs=n_max_epochs)
    ])

    dataset = combined_set
    splitter = dataset_splitter
    updates_expression = adam
    updates_modifier = MaxNormConstraintWithDefaults({})
    exp = Experiment(final_layer,
                     dataset,
                     splitter,
                     None,
                     iterator,
                     loss_expression,
                     updates_expression,
                     updates_modifier,
                     monitors,
                     stop_criterion,
                     remember_best_chan,
                     run_after_early_stop,
                     batch_modifier=None)

    if only_return_exp:
        return exp

    exp.setup()
    exp.run()
    end_time = time.time()
    run_time = end_time - start_time

    ex.info['finished'] = True
    # Record the final value of every monitored channel.
    for key in exp.monitor_chans:
        ex.info[key] = exp.monitor_chans[key][-1]
    ex.info['runtime'] = run_time
    save_pkl_artifact(ex, exp.monitor_chans, 'monitor_chans.pkl')
Ejemplo n.º 8
0
def run(
    ex,
    data_folder,
    subject_id,
    n_chans,
    clean_train,
    low_cut_hz,
    train_start_ms,
    kappa_mode,
    loss_expression,
    filt_order,
    only_return_exp,
):
    """Train a deep ConvNet on one BCI Competition IV 2a subject.

    Parameters
    ----------
    ex :
        Sacred-style experiment object; results are written to ``ex.info``
        and saved as pickle/npy artifacts.
    data_folder : str
        Folder containing ``A{subject:02d}T.mat`` / ``A{subject:02d}E.mat``.
    subject_id : int
        Subject number used to format the file names.
    n_chans : int or None
        Number of input channels; must be given when ``only_return_exp`` is
        True, since then the data is not loaded.
    clean_train : bool
        If True, artefact-mask-clean the training set too (test set is
        always cleaned).
    low_cut_hz, filt_order :
        Bandpass filter parameters (high cutoff fixed at 38 Hz).
    train_start_ms : int
        Start (ms) of the training segment interval (end fixed at 4000 ms).
    kappa_mode :
        Mode passed to the KappaMonitor.
    loss_expression :
        Loss passed to the Experiment.
    only_return_exp : bool
        If True, build and return the Experiment without loading/training.

    Returns
    -------
    Experiment or None
        The unstarted Experiment if ``only_return_exp``; otherwise None.
    """
    start_time = time.time()
    assert (only_return_exp is False) or (n_chans is not None)
    ex.info['finished'] = False
    load_sensor_names = None
    train_filename = 'A{:02d}T.mat'.format(subject_id)
    test_filename = 'A{:02d}E.mat'.format(subject_id)
    train_filepath = os.path.join(data_folder, train_filename)
    test_filepath = os.path.join(data_folder, test_filename)

    # trial ival in milliseconds
    # these are the samples that will be predicted, so for a
    # network with 2000ms receptive field
    # 1500 means the first receptive field goes from -500 to 1500
    train_segment_ival = [train_start_ms, 4000]
    test_segment_ival = [0, 4000]

    train_loader = BCICompetition4Set2A(train_filepath,
                                        load_sensor_names=load_sensor_names)
    test_loader = BCICompetition4Set2A(test_filepath,
                                       load_sensor_names=load_sensor_names)

    # Preprocessing pipeline in [(function, {args:values})] logic
    cnt_preprocessors = [(resample_cnt, {
        'newfs': 250.0
    }),
                         (bandpass_cnt, {
                             'low_cut_hz': low_cut_hz,
                             'high_cut_hz': 38,
                             'filt_order': filt_order,
                         }), (exponential_standardize_cnt, {})]

    marker_def = {
        '1- Right Hand': [1],
        '2 - Left Hand': [2],
        '3 - Rest': [3],
        '4 - Feet': [4]
    }

    train_signal_proc = SignalProcessor(set_loader=train_loader,
                                        segment_ival=train_segment_ival,
                                        cnt_preprocessors=cnt_preprocessors,
                                        marker_def=marker_def)
    train_set = CntSignalMatrix(signal_processor=train_signal_proc,
                                sensor_names='all')

    test_signal_proc = SignalProcessor(set_loader=test_loader,
                                       segment_ival=test_segment_ival,
                                       cnt_preprocessors=cnt_preprocessors,
                                       marker_def=marker_def)
    test_set = CntSignalMatrix(signal_processor=test_signal_proc,
                               sensor_names='all')

    if clean_train:
        train_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
            marker_def=marker_def)
    else:
        train_cleaner = NoCleaner()
    test_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
        marker_def=marker_def)
    combined_set = CombinedCleanedSet(train_set, test_set, train_cleaner,
                                      test_cleaner)
    if not only_return_exp:
        combined_set.load()
        in_chans = train_set.get_topological_view().shape[1]
    else:
        # Data not loaded: take the channel count from the caller instead.
        in_chans = n_chans
    input_time_length = 1000  # implies how many crops are processed in parallel, does _not_ determine receptive field size
    # receptive field size is determined by model architecture

    # ensure reproducibility by resetting lasagne/theano random generator
    lasagne.random.set_rng(RandomState(34734))
    final_layer = create_deep_net(in_chans, input_time_length)
    print_layers(final_layer)

    dataset_splitter = SeveralSetsSplitter(valid_set_fraction=0.2,
                                           use_test_as_valid=False)
    iterator = CntWindowTrialIterator(
        batch_size=45,
        input_time_length=input_time_length,
        n_sample_preds=get_n_sample_preds(final_layer))

    monitors = [
        LossMonitor(),
        CntTrialMisclassMonitor(input_time_length=input_time_length),
        KappaMonitor(input_time_length=iterator.input_time_length,
                     mode=kappa_mode),
        RuntimeMonitor(),
    ]

    #debug: n_no_decrease_max_epochs = 2
    #debug: n_max_epochs = 4
    n_no_decrease_max_epochs = 80
    n_max_epochs = 800  #100
    # real values for paper were 80 and 800
    stop_criterion = Or([
        NoDecrease('valid_misclass', num_epochs=n_no_decrease_max_epochs),
        MaxEpochs(num_epochs=n_max_epochs)
    ])

    dataset = combined_set
    splitter = dataset_splitter
    updates_expression = adam
    updates_modifier = MaxNormConstraintWithDefaults({})
    remember_best_chan = 'valid_misclass'
    run_after_early_stop = True
    exp = Experiment(final_layer,
                     dataset,
                     splitter,
                     None,
                     iterator,
                     loss_expression,
                     updates_expression,
                     updates_modifier,
                     monitors,
                     stop_criterion,
                     remember_best_chan,
                     run_after_early_stop,
                     batch_modifier=None)

    if only_return_exp:
        return exp

    exp.setup()
    exp.run()
    end_time = time.time()
    run_time = end_time - start_time

    ex.info['finished'] = True
    # Record the final value of every monitored channel.
    for key in exp.monitor_chans:
        ex.info[key] = exp.monitor_chans[key][-1]
    ex.info['runtime'] = run_time
    save_pkl_artifact(ex, exp.monitor_chans, 'monitor_chans.pkl')
    save_npy_artifact(ex, lasagne.layers.get_all_param_values(exp.final_layer),
                      'model_params.npy')
0
def run(ex, data_folder, subject_id, n_chans, stop_chan, filt_order,
        low_cut_hz, loss_expression, only_return_exp, run_after_early_stop,
        sets_like_fbcsp_paper):
    """Run a 10-fold cross-validation of Deep5Net on one BCI IV 2b subject.

    The train/test split of the competition is collapsed: after loading, only
    the (cleaned) test set is used as the dataset, split per fold, optionally
    with the training sessions attached as an additional set.

    Parameters
    ----------
    ex :
        Sacred-style experiment object; per-fold monitor channels are
        averaged into ``ex.info`` and saved as a pickle artifact.
    data_folder : str
        Folder with the 2b session files.
    subject_id : int
        Subject number.
    n_chans : int or None
        Number of input channels; must be given when ``only_return_exp`` is
        True, since then the data is not loaded.
    stop_chan : str
        Monitor channel (without 'valid_' prefix) used for early stopping.
    filt_order, low_cut_hz :
        Bandpass filter parameters (high cutoff fixed at 38 Hz).
    loss_expression :
        Loss passed to the Experiment.
    only_return_exp : bool
        If True, build and return the first fold's Experiment without
        loading data or training.
    run_after_early_stop : bool
        Whether to continue training on train+valid after early stop.
    sets_like_fbcsp_paper : bool
        Reproduce the per-subject session selection of the FBCSP paper.

    Returns
    -------
    Experiment or None
        The unstarted Experiment if ``only_return_exp``; otherwise None.
    """
    start_time = time.time()
    assert (only_return_exp is False) or (n_chans is not None)
    ex.info['finished'] = False

    # trial ival in milliseconds
    # these are the samples that will be predicted, so for a
    # network with 2000ms receptive field
    # 1500 means the first receptive field goes from -500 to 1500
    train_segment_ival = [1500, 4000]
    test_segment_ival = [1500, 4000]

    add_additional_set = True
    session_ids = [
        1,
        2,
    ]
    if sets_like_fbcsp_paper:
        # Per-subject session selection used in the FBCSP paper.
        if subject_id in [4, 5, 6, 7, 8, 9]:
            session_ids = [3]  # dummy
            add_additional_set = False
        elif subject_id == 1:
            session_ids = [
                1,
            ]
        else:
            assert subject_id in [2, 3]
            session_ids = [1, 2]

    train_loader = MultipleBCICompetition4Set2B(subject_id,
                                                session_ids=session_ids,
                                                data_folder=data_folder)

    test_loader = MultipleBCICompetition4Set2B(subject_id,
                                               session_ids=[3],
                                               data_folder=data_folder)

    # Preprocessing pipeline in [(function, {args:values})] logic
    cnt_preprocessors = [(resample_cnt, {
        'newfs': 250.0
    }),
                         (bandpass_cnt, {
                             'low_cut_hz': low_cut_hz,
                             'high_cut_hz': 38,
                             'filt_order': filt_order,
                         }), (exponential_standardize_cnt, {})]

    marker_def = {'1- Left Hand': [1], '2 - Right Hand': [2]}

    train_signal_proc = SignalProcessor(set_loader=train_loader,
                                        segment_ival=train_segment_ival,
                                        cnt_preprocessors=cnt_preprocessors,
                                        marker_def=marker_def)
    train_set = CntSignalMatrix(signal_processor=train_signal_proc,
                                sensor_names='all')

    test_signal_proc = SignalProcessor(set_loader=test_loader,
                                       segment_ival=test_segment_ival,
                                       cnt_preprocessors=cnt_preprocessors,
                                       marker_def=marker_def)
    test_set = CntSignalMatrix(signal_processor=test_signal_proc,
                               sensor_names='all')

    train_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
        marker_def=marker_def)
    test_cleaner = BCICompetitionIV2ABArtefactMaskCleaner(
        marker_def=marker_def)
    combined_set = CombinedCleanedSet(train_set, test_set, train_cleaner,
                                      test_cleaner)
    if not only_return_exp:
        combined_set.load()
        # only need train set actually, split is done later per fold
        combined_set = combined_set.test_set
        if add_additional_set:
            combined_set.additional_set = train_set
        in_chans = train_set.get_topological_view().shape[1]
    else:
        # Data was not loaded, so the channel count cannot be read from the
        # dataset; fall back to the caller-supplied n_chans (asserted above).
        # Previously this read the (unloaded) train set unconditionally and
        # left n_chans unused.
        in_chans = n_chans

    input_time_length = 1000  # implies how many crops are processed in parallel, does _not_ determine receptive field size
    # receptive field size is determined by model architecture
    num_filters_time = 25
    filter_time_length = 10
    num_filters_spat = 25
    pool_time_length = 3
    pool_time_stride = 3
    num_filters_2 = 50
    filter_length_2 = 10
    num_filters_3 = 100
    filter_length_3 = 10
    num_filters_4 = 200
    filter_length_4 = 10
    final_dense_length = 2
    n_classes = 2
    final_nonlin = softmax
    first_nonlin = elu
    first_pool_mode = 'max'
    first_pool_nonlin = identity
    later_nonlin = elu
    later_pool_mode = 'max'
    later_pool_nonlin = identity
    drop_in_prob = 0.0
    drop_prob = 0.5
    batch_norm_alpha = 0.1
    double_time_convs = False
    split_first_layer = True
    batch_norm = True

    def run_exp(i_fold):
        """Build, train and return the Experiment for one CV fold."""
        # ensure reproducibility by resetting lasagne/theano random generator
        lasagne.random.set_rng(RandomState(34734))

        d5net = Deep5Net(in_chans=in_chans,
                         input_time_length=input_time_length,
                         num_filters_time=num_filters_time,
                         filter_time_length=filter_time_length,
                         num_filters_spat=num_filters_spat,
                         pool_time_length=pool_time_length,
                         pool_time_stride=pool_time_stride,
                         num_filters_2=num_filters_2,
                         filter_length_2=filter_length_2,
                         num_filters_3=num_filters_3,
                         filter_length_3=filter_length_3,
                         num_filters_4=num_filters_4,
                         filter_length_4=filter_length_4,
                         final_dense_length=final_dense_length,
                         n_classes=n_classes,
                         final_nonlin=final_nonlin,
                         first_nonlin=first_nonlin,
                         first_pool_mode=first_pool_mode,
                         first_pool_nonlin=first_pool_nonlin,
                         later_nonlin=later_nonlin,
                         later_pool_mode=later_pool_mode,
                         later_pool_nonlin=later_pool_nonlin,
                         drop_in_prob=drop_in_prob,
                         drop_prob=drop_prob,
                         batch_norm_alpha=batch_norm_alpha,
                         double_time_convs=double_time_convs,
                         split_first_layer=split_first_layer,
                         batch_norm=batch_norm)
        final_layer = d5net.get_layers()[-1]
        # Clip predictions away from 0/1 to keep the loss numerically stable.
        final_layer = ClipLayer(final_layer, 1e-4, 1 - 1e-4)
        dataset_splitter = CntTrialSingleFoldSplitter(n_folds=10,
                                                      i_test_fold=i_fold,
                                                      shuffle=True)
        iterator = CntWindowTrialIterator(
            batch_size=45,
            input_time_length=input_time_length,
            n_sample_preds=get_n_sample_preds(final_layer))

        monitors = [
            LossMonitor(),
            CntTrialMisclassMonitor(input_time_length=input_time_length),
            KappaMonitor(input_time_length=iterator.input_time_length,
                         mode='max'),
            RuntimeMonitor()
        ]

        #n_no_decrease_max_epochs = 2
        #n_max_epochs = 4
        n_no_decrease_max_epochs = 80
        n_max_epochs = 800
        # real values for paper were 80 and 800
        remember_best_chan = 'valid_' + stop_chan
        stop_criterion = Or([
            NoDecrease(remember_best_chan,
                       num_epochs=n_no_decrease_max_epochs),
            MaxEpochs(num_epochs=n_max_epochs)
        ])

        dataset = combined_set
        splitter = dataset_splitter
        updates_expression = adam
        updates_modifier = MaxNormConstraintWithDefaults({})
        preproc = None
        exp = Experiment(final_layer,
                         dataset,
                         splitter,
                         preproc,
                         iterator,
                         loss_expression,
                         updates_expression,
                         updates_modifier,
                         monitors,
                         stop_criterion,
                         remember_best_chan,
                         run_after_early_stop,
                         batch_modifier=None)

        if only_return_exp:
            return exp

        exp.setup()
        exp.run()
        return exp

    all_monitor_chans = []
    n_folds = 10
    for i_fold in range(n_folds):
        log.info("Running fold {:d} of {:d}".format(i_fold + 1, n_folds))
        exp = run_exp(i_fold)
        if only_return_exp:
            return exp
        all_monitor_chans.append(exp.monitor_chans)
    end_time = time.time()
    run_time = end_time - start_time

    ex.info['finished'] = True
    # Average the final value of each monitor channel over all folds.
    keys = all_monitor_chans[0].keys()
    for key in keys:
        ex.info[key] = np.mean(
            [mchans[key][-1] for mchans in all_monitor_chans])
    ex.info['runtime'] = run_time
    save_pkl_artifact(ex, all_monitor_chans, 'all_monitor_chans.pkl')
Ejemplo n.º 10
0
def check_as_sets(train_file_name, test_file_name, combined_file_name):
    """Sanity-check that a combined set file equals its train + test parts.

    Loads the train, test and combined .mat files, epochs them, and verifies
    that the first 288 epochs of the combined set match the train set and the
    remaining epochs match the test set.  NaNs were replaced by means during
    loading, so a small fraction (< 1%) of mismatching values is tolerated.
    """
    train_set = BCICompetition4Set2A(train_file_name)
    train_wyrm_set = SignalProcessor(train_set)
    train_wyrm_set.load()

    test_set = BCICompetition4Set2A(test_file_name)
    # The evaluation file carries only the "unknown class" marker code.
    test_wyrm_set = SignalProcessor(test_set,
                                    marker_def={'Unknown': [-2147483648]})
    test_wyrm_set.load()

    combined_set = BCICompetition4Set2A(combined_file_name)
    combined_wyrm_set = SignalProcessor(combined_set)
    combined_wyrm_set.load()
    # nans were made to be means, so ignore that some values are not equal
    train_epo = train_wyrm_set.epo.data
    test_epo = test_wyrm_set.epo.data
    combined_epo = combined_wyrm_set.epo.data
    train_part = combined_epo[:288]
    assert (np.sum(train_epo - train_part != 0) /
            float(np.prod(train_epo.shape))) < 1e-2
    test_part = combined_epo[288:]
    # Count mismatching entries, matching the train check above.
    # (Was: a signed sum of differences, which can cancel to ~0 and pass
    # even for badly differing data.)
    assert (np.sum(test_epo - test_part != 0) /
            float(np.prod(test_epo.shape))) < 1e-2
    log.info("Set ok")