Beispiel #1
0
def create_shallow_net(in_chans, input_time_length):
    """Build a shallow FBCSP-style net and return its clipped final layer.

    The receptive field size is fixed by the architecture hyperparameters
    below; the network always has two output classes.
    """
    # Reset the lasagne/theano random generator so results are reproducible.
    lasagne.random.set_rng(RandomState(34734))

    # All architecture hyperparameters gathered in one place.
    shallow_params = dict(
        n_filters_time=40,
        filter_time_length=25,
        n_filters_spat=40,
        pool_time_length=75,
        pool_time_stride=15,
        final_dense_length=30,
        conv_nonlin=square,
        pool_mode='average_exc_pad',
        pool_nonlin=safe_log,
        split_first_layer=True,
        batch_norm=True,
        batch_norm_alpha=0.1,
        drop_prob=0.5,
    )
    net = ShallowFBCSPNet(in_chans, input_time_length, 2, **shallow_params)
    # Clip predictions away from exactly 0/1 so the log-loss stays finite.
    clipped = ClipLayer(net.get_layers()[-1], 1e-4, 1 - 1e-4)
    return clipped
Beispiel #2
0
def create_shallow_net(in_chans, input_time_length, final_dense_length,
                       n_classes):
    """Create the shallow model and wrap its output layer in a ClipLayer."""
    model = create_shallow_model(in_chans, input_time_length,
                                 final_dense_length, n_classes)
    # Clip predictions away from exactly 0/1 so the log-loss stays finite.
    return ClipLayer(model.get_layers()[-1], 1e-4, 1 - 1e-4)
Beispiel #3
0
def create_merged_net(in_chans,
                      input_time_length,
                      final_dense_length_deep,
                      final_dense_length_shallow,
                      n_classes,
                      filter_time_length_deep=13):
    """Merge a deep and a shallow branch and return the clipped output layer.

    Both branches are built for the same input; their features are combined
    by a MergedNet before the final classification layer.
    """
    # Per-branch feature counts fed into the merge layer.
    n_deep_features, n_shallow_features = 60, 40

    deep_branch = create_deep_model(
        in_chans,
        input_time_length,
        final_dense_length_deep,
        n_classes,
        filter_time_length=filter_time_length_deep)
    shallow_branch = create_shallow_model(in_chans, input_time_length,
                                          final_dense_length_shallow,
                                          n_classes)
    merged = MergedNet([deep_branch, shallow_branch],
                       [n_deep_features, n_shallow_features],
                       n_classes,
                       batch_norm_before_merge=True,
                       nonlin_before_merge=elu)
    # Clip predictions away from exactly 0/1 so the log-loss stays finite.
    return ClipLayer(merged.get_layers()[-1], 1e-4, 1 - 1e-4)
def create_deep_net(in_chans, input_time_length):
    """Build a Deep5Net for 4-class decoding and return its clipped output."""
    # All architecture hyperparameters gathered in one kwargs dict so the
    # constructor call below stays readable.
    deep_params = dict(
        num_filters_time=25,
        filter_time_length=10,
        num_filters_spat=25,
        pool_time_length=3,
        pool_time_stride=3,
        num_filters_2=50,
        filter_length_2=10,
        num_filters_3=100,
        filter_length_3=10,
        num_filters_4=200,
        filter_length_4=10,
        final_dense_length=2,
        n_classes=4,
        final_nonlin=softmax,
        first_nonlin=elu,
        first_pool_mode='max',
        first_pool_nonlin=identity,
        later_nonlin=elu,
        later_pool_mode='max',
        later_pool_nonlin=identity,
        drop_in_prob=0.0,
        drop_prob=0.5,
        batch_norm_alpha=0.1,
        double_time_convs=False,
        split_first_layer=True,
        batch_norm=True,
    )
    net = Deep5Net(in_chans=in_chans,
                   input_time_length=input_time_length,
                   **deep_params)
    # Clip predictions away from exactly 0/1 so the log-loss stays finite.
    return ClipLayer(net.get_layers()[-1], 1e-4, 1 - 1e-4)
Beispiel #5
0
def create_deep_net(in_chans,
                    input_time_length,
                    final_dense_length,
                    n_classes,
                    filter_time_length=10,
                    filter_length_4=10):
    """Create the deep model and wrap its output layer in a ClipLayer."""
    model = create_deep_model(in_chans,
                              input_time_length,
                              final_dense_length,
                              n_classes,
                              filter_time_length=filter_time_length,
                              filter_length_4=filter_length_4)
    # Clip predictions away from exactly 0/1 so the log-loss stays finite.
    return ClipLayer(model.get_layers()[-1], 1e-4, 1 - 1e-4)
Beispiel #6
0
def create_deep_net(in_chans, input_time_length):
    """Build a Deep5Net for 2-class decoding and return its clipped output.

    The receptive field size is fixed by the architecture hyperparameters
    below.
    """
    # All architecture hyperparameters gathered in one kwargs dict so the
    # constructor call below stays readable.
    deep_params = dict(
        num_filters_time=25,
        filter_time_length=10,
        num_filters_spat=25,
        pool_time_length=3,
        pool_time_stride=3,
        num_filters_2=50,
        filter_length_2=10,
        num_filters_3=100,
        filter_length_3=10,
        num_filters_4=200,
        filter_length_4=10,
        final_dense_length=2,
        n_classes=2,
        final_nonlin=softmax,
        first_nonlin=elu,
        first_pool_mode='max',
        first_pool_nonlin=identity,
        later_nonlin=elu,
        later_pool_mode='max',
        later_pool_nonlin=identity,
        drop_in_prob=0.0,
        drop_prob=0.5,
        batch_norm_alpha=0.1,
        double_time_convs=False,
        split_first_layer=True,
        batch_norm=True,
    )
    # Reset the lasagne/theano random generator so results are reproducible.
    lasagne.random.set_rng(RandomState(34734))

    net = Deep5Net(in_chans=in_chans,
                   input_time_length=input_time_length,
                   **deep_params)
    # Clip predictions away from exactly 0/1 so the log-loss stays finite.
    return ClipLayer(net.get_layers()[-1], 1e-4, 1 - 1e-4)
Beispiel #7
0
    def run_exp(i_fold):
        """Build and (optionally) run one cross-validation Experiment.

        Parameters
        ----------
        i_fold : int
            Index of the fold to hold out as the test fold (out of 10).

        Returns
        -------
        Experiment
            If ``only_return_exp`` (from the enclosing scope) is truthy, the
            experiment is returned unconfigured; otherwise it is set up, run,
            and then returned.

        NOTE(review): all hyperparameters (in_chans, num_filters_*, etc.),
        ``combined_set``, ``stop_chan``, ``loss_expression``,
        ``run_after_early_stop`` and ``only_return_exp`` are free variables
        captured from the enclosing scope — confirm they are defined there.
        """
        # ensure reproducibility by resetting lasagne/theano random generator
        lasagne.random.set_rng(RandomState(34734))

        # Build the Deep5Net from the closure's hyperparameters.
        d5net = Deep5Net(in_chans=in_chans,
                         input_time_length=input_time_length,
                         num_filters_time=num_filters_time,
                         filter_time_length=filter_time_length,
                         num_filters_spat=num_filters_spat,
                         pool_time_length=pool_time_length,
                         pool_time_stride=pool_time_stride,
                         num_filters_2=num_filters_2,
                         filter_length_2=filter_length_2,
                         num_filters_3=num_filters_3,
                         filter_length_3=filter_length_3,
                         num_filters_4=num_filters_4,
                         filter_length_4=filter_length_4,
                         final_dense_length=final_dense_length,
                         n_classes=n_classes,
                         final_nonlin=final_nonlin,
                         first_nonlin=first_nonlin,
                         first_pool_mode=first_pool_mode,
                         first_pool_nonlin=first_pool_nonlin,
                         later_nonlin=later_nonlin,
                         later_pool_mode=later_pool_mode,
                         later_pool_nonlin=later_pool_nonlin,
                         drop_in_prob=drop_in_prob,
                         drop_prob=drop_prob,
                         batch_norm_alpha=batch_norm_alpha,
                         double_time_convs=double_time_convs,
                         split_first_layer=split_first_layer,
                         batch_norm=batch_norm)
        final_layer = d5net.get_layers()[-1]
        # Clip predictions away from exactly 0/1 so the log-loss stays finite.
        final_layer = ClipLayer(final_layer, 1e-4, 1 - 1e-4)
        # 10-fold split; i_fold selects which fold is held out for testing.
        dataset_splitter = CntTrialSingleFoldSplitter(n_folds=10,
                                                      i_test_fold=i_fold,
                                                      shuffle=True)
        iterator = CntWindowTrialIterator(
            batch_size=45,
            input_time_length=input_time_length,
            n_sample_preds=get_n_sample_preds(final_layer))

        # Quantities tracked after every epoch.
        monitors = [
            LossMonitor(),
            CntTrialMisclassMonitor(input_time_length=input_time_length),
            KappaMonitor(input_time_length=iterator.input_time_length,
                         mode='max'),
            RuntimeMonitor()
        ]

        # Smaller values for quick debugging runs:
        #n_no_decrease_max_epochs = 2
        #n_max_epochs = 4
        n_no_decrease_max_epochs = 80
        n_max_epochs = 800
        # real values for paper were 80 and 800
        # Stop when the validation channel has not improved for
        # n_no_decrease_max_epochs epochs, or after n_max_epochs total.
        remember_best_chan = 'valid_' + stop_chan
        stop_criterion = Or([
            NoDecrease(remember_best_chan,
                       num_epochs=n_no_decrease_max_epochs),
            MaxEpochs(num_epochs=n_max_epochs)
        ])

        dataset = combined_set
        splitter = dataset_splitter
        updates_expression = adam
        updates_modifier = MaxNormConstraintWithDefaults({})
        preproc = None
        exp = Experiment(final_layer,
                         dataset,
                         splitter,
                         preproc,
                         iterator,
                         loss_expression,
                         updates_expression,
                         updates_modifier,
                         monitors,
                         stop_criterion,
                         remember_best_chan,
                         run_after_early_stop,
                         batch_modifier=None)

        # When only inspecting the configuration, skip setup and training.
        if only_return_exp:
            return exp

        exp.setup()
        exp.run()
        return exp