Example 1
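        # snippet from a larger benchmarking script: an enclosing loop over
        # pipeline configs is assumed to define `config`, `paradigm`,
        # `paradigms`, `context_params`, `options`, `log` and the helper
        # `get_string_rep`, plus the usual imports (deepcopy, pandas, MOABB)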
        # copy the estimator so that fitting it later does not modify the config
        if isinstance(config['pipeline'], BaseEstimator):
            pipeline = deepcopy(config['pipeline'])
        else:
            log.error(config['pipeline'])
            raise ValueError(
                'pipeline must be a list or a sklearn estimator')

        # register the pipeline under its paradigm
        if paradigm not in paradigms:
            paradigms[paradigm] = {}

        # FIXME: names are not unique
        log.debug('Pipeline: \n\n {} \n'.format(get_string_rep(pipeline)))
        paradigms[paradigm][config['name']] = pipeline

all_results = []
for paradigm in paradigms:
    # get the context parameters for this paradigm, falling back to an empty
    # dict when none were supplied
    if paradigm not in context_params:
        context_params[paradigm] = {}
    log.debug('{}: {}'.format(paradigm, context_params[paradigm]))
    p = getattr(moabb_paradigms, paradigm)(**context_params[paradigm])
    context = WithinSessionEvaluation(paradigm=p,
                                      random_state=42,
                                      n_jobs=options.threads)
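    # evaluate every pipeline registered for this paradigm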
    results = context.process(pipelines=paradigms[paradigm])
    all_results.append(results)
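# aggregate the results of all paradigms and generate the summary analysis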
analyze(pd.concat(all_results, ignore_index=True),
        options.output,
        plot=options.plot)
Example 2
import logging
import coloredlogs
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
coloredlogs.install(level=logging.DEBUG)
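
# Imports assumed by the rest of this example; the module paths follow the
# MOABB and pyriemann versions this snippet was written against (ImageryNClass
# in particular ships with older MOABB releases) and may differ today.
from collections import OrderedDict

from sklearn.pipeline import make_pipeline
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA

from pyriemann.estimation import Covariances
from pyriemann.classification import TSclassifier
from pyriemann.spatialfilters import CSP

from moabb.datasets import utils
from moabb.paradigms import ImageryNClass
from moabb.evaluations import WithinSessionEvaluation
from moabb.analysis import analyze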

# search for imagery datasets with at least two subjects, matching the
# requested events (not necessarily all of them)
datasets = utils.dataset_search('imagery',
                                events=['supination', 'hand_close'],
                                has_all_events=False,
                                min_subjects=2,
                                multi_session=False)

# use only the first 10 subjects of each dataset
for d in datasets:
    d.subject_list = d.subject_list[:10]

# two-class imagery paradigm, evaluated within each session
paradigm = ImageryNClass(2)
context = WithinSessionEvaluation(paradigm=paradigm,
                                  datasets=datasets,
                                  random_state=42)

# two Riemannian pipelines: a tangent-space classifier and CSP+LDA, both built
# on OAS-estimated covariance matrices
pipelines = OrderedDict()
pipelines['av+TS'] = make_pipeline(Covariances(estimator='oas'),
                                   TSclassifier())
pipelines['av+CSP+LDA'] = make_pipeline(Covariances(estimator='oas'), CSP(8),
                                        LDA())

# run the evaluation, overwriting any previously cached results
results = context.process(pipelines, overwrite=True)

# write the summary analysis of the results to the current directory
analyze(results, './')