def analysis(conf, name):
    #print(conf._default_options)
    kwargs = conf._get_kwargs()
    #print(kwargs)
    a = AnalysisPipeline(conf, name=name).fit(**kwargs)
    a.save(path="/media/robbis/DATA/fmri/c2b/")
    return
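For orientation: this helper is written to be called once per configuration, in the same iterator pattern used by the later examples on this page. A minimal driver sketch, assuming `options` and `base_config` dicts like the ones defined further down (the name argument is illustrative):

iterator = AnalysisIterator(options,
                            AnalysisConfigurator,
                            config_kwargs=base_config)
for conf in iterator:
    analysis(conf, name='example')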
Example #2
def analysis(conf, name):
    #print(conf._default_options)
    kwargs = conf._get_kwargs()
    #print(kwargs)
    a = AnalysisPipeline(conf, name=name).fit(**kwargs)
    a.save(path="/media/guidotr1/Seagate_Pt1/data/simulations/")
    return
                   {
                       'task': ['task5']
                   },
               ])

iterator = AnalysisIterator(options,
                            AnalysisConfigurator,
                            config_kwargs=base_config)

errs = []

for conf in iterator:
    kwargs = conf._get_kwargs()
    try:
        a = AnalysisPipeline(conf, name='fingerprint+tavor').fit(ds, **kwargs)
        a.save()
    except Exception as err:
        errs.append([conf._default_options, err])
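After the loop, `errs` pairs the option set of each failing configuration with the exception it raised; a quick inspection sketch:

# Report which option sets failed and why.
for opts, err in errs:
    print(f"{opts} -> {type(err).__name__}: {err}")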

#######################################################################################

base_config = dict(prepro=['sample_slicer', 'feature_slicer'],
                   estimator=[('clf', LinearRegression())],
                   analysis=TaskPredictionTavor,
                   analysis__n_jobs=-1,
                   analysis__permutation=0,
                   analysis__verbose=0,
                   kwargs__x_attr={'task': ['rest']},
                   kwargs__prepro=[SampleZNormalizer()])
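A note on the key convention used throughout these dicts (an inference from usage on this page, not from pyitab documentation): double-underscore keys appear to route a value to a named sub-component, while plain keys configure the pipeline itself. A hypothetical illustration:

# Apparent routing of double-underscore keys (inferred from usage):
routing_demo = {
    'analysis__n_jobs': -1,                # -> n_jobs on the analysis object
    'estimator__clf__C': 1,                # -> C on the 'clf' step of the estimator
    'kwargs__x_attr': {'task': ['rest']},  # -> forwarded to fit() as x_attr
}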

options = dict(sample_slicer__band=[
Example #4
def run(n_jobs):
    path = "/media/robbis/Seagate_Pt1/data/working_memory/"

    conf_file = "%s/data/working_memory.conf" % (path)

    ### Load datasets ###

    iterator_kwargs = {
        "loader__img_pattern": [
            #'power_parcel.mat',
            'power_normalized.mat',
            #'connectivity_matrix.mat'
            'mpsi_normalized.mat'
        ],
        "fetch__prepro": [['none'], ['none']],
        "loader__task": ["POWER", "CONN"]
    }

    config_kwargs = {
        'loader': DataLoader,
        'loader__configuration_file': conf_file,
        'loader__loader': 'mat',
        'loader__task': 'POWER',
        #'fetch__n_subjects': 57,
        "loader__data_path": "%s/data/" % (path),
        "loader__subjects": "%s/data/participants.csv" % (path),
    }

    iterator = AnalysisIterator(iterator_kwargs,
                                AnalysisConfigurator,
                                config_kwargs=config_kwargs,
                                kind='list')

    ds_list = [generate(configurator) for configurator in iterator]

    for i, ds in enumerate(ds_list):
        ds_ = ds.copy()
        if i == 0:
            k = np.arange(1, 88, 10)
            ds_ = DatasetFxNormalizer(ds_fx=np.mean).transform(ds_)
        else:
            k = np.arange(1, 400, 50)
            #ds_ = DatasetFxNormalizer(ds_fx=np.mean).transform(ds_)

        _default_options = {
            #'sample_slicer__targets': [['0back', '2back'], ['0back', 'rest'], ['rest', '2back']],
            #'kwargs__ds': ds_list,
            'sample_slicer__targets': [['0back'], ['2back']],
            'target_transformer__attr': ['accuracy_0back_both', 'accuracy_2back_both',
                                         'rt_0back_both', 'rt_2back_both'],
            'sample_slicer__band': [['alpha'], ['beta'], ['theta'], ['gamma']],
            'estimator__fsel__k': k,
            'clf__C': [1, 10, 100],
            'clf__kernel': ['linear', 'rbf']
        }

        _default_config = {
            'prepro': ['sample_slicer', 'target_transformer'],
            'sample_slicer__band': ['gamma'],
            'sample_slicer__targets': ['0back', '2back'],
            'estimator': [('fsel', SelectKBest(score_func=f_regression, k=5)),
                          ('clf', SVR(C=10, kernel='linear'))],
            'estimator__clf__C': 1,
            'estimator__clf__kernel': 'linear',
            'cv': GroupShuffleSplit,
            'cv__n_splits': 75,
            'cv__test_size': 0.25,
            'analysis_scoring': ['r2', 'neg_mean_squared_error'],
            'analysis': RoiRegression,
            'analysis__n_jobs': n_jobs,
            'analysis__permutation': 0,
            'analysis__verbose': 0,
            'kwargs__roi': ['matrix_values'],
            'kwargs__cv_attr': 'subjects',
        }

        iterator = AnalysisIterator(_default_options,
                                    AnalysisConfigurator,
                                    config_kwargs=_default_config)

        for conf in iterator:
            kwargs = conf._get_kwargs()
            a = AnalysisPipeline(conf,
                                 name="triton+behavioural").fit(ds_, **kwargs)
            a.save()
            del a
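Since every value in _default_options above is a list of candidates, the iterator presumably expands them as a Cartesian product (the grid behaviour implied here, as opposed to the kind='list' pairing used for the loaders). A standalone sanity check of the grid size for the i == 0 branch:

from itertools import product
import numpy as np

k = np.arange(1, 88, 10)  # 9 values, as in the i == 0 branch
n_configs = len(list(product(
    [['0back'], ['2back']],                       # sample_slicer__targets
    ['accuracy_0back_both', 'accuracy_2back_both',
     'rt_0back_both', 'rt_2back_both'],           # target_transformer__attr
    [['alpha'], ['beta'], ['theta'], ['gamma']],  # sample_slicer__band
    k,                                            # estimator__fsel__k
    [1, 10, 100],                                 # clf__C
    ['linear', 'rbf'],                            # clf__kernel
)))
print(n_configs)  # 2 * 4 * 4 * 9 * 3 * 2 = 1728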
Example #5
    'roi': ['lateral_ips'],
    'kwargs__prepro': ['featurenorm', 'samplenorm'],
    'analysis__n_jobs': 2
}

_default_options = {
    'sampleslicer__evidence': [[1], [2], [3]],
    'cv__n_splits': [3, 5],
}

iterator = AnalysisIterator(_default_options,
                            AnalysisConfigurator(**_default_config))
for conf in iterator:
    kwargs = conf._get_kwargs()
    a = AnalysisPipeline(conf, name="permut1_wm").fit(ds, **kwargs)
    a.save()
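Two calling conventions for AnalysisIterator appear across these examples (an observation from usage on this page, not from pyitab docs): passing the configurator class together with config_kwargs, or passing an already-built configurator instance, as done just above:

# Convention 1: class plus config_kwargs (Examples #4, #6, #10)
iterator = AnalysisIterator(_default_options,
                            AnalysisConfigurator,
                            config_kwargs=_default_config)

# Convention 2: pre-built instance (this example, #8, #9)
iterator = AnalysisIterator(_default_options,
                            AnalysisConfigurator(**_default_config))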

##################################################
# Review
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.configurator import AnalysisConfigurator

loader = DataLoader(
    configuration_file="/home/carlos/fmri/carlo_ofp/ofp_new.conf",
    task='OFP_NORES')

prepro = PreprocessingPipeline(nodes=[
    #Transformer(),
    Detrender(),
    Detrender(chunks_attr='file'),
Example #6
import sentry_sdk
from sentry_sdk import capture_exception

sentry_sdk.init(
    "https://[email protected]/1439199",
    traces_sample_rate=1.0,
)

errors = []
iterator = AnalysisIterator(_default_options,
                            AnalysisConfigurator,
                            config_kwargs=_default_config)
for conf in iterator:
    kwargs = conf._get_kwargs()
    try:
        a = AnalysisPipeline(conf, name="reftep+nine+subjects").fit(**kwargs)
        a.save(path="/media/robbis/DATA/meg/reftep/derivatives/results/")
    except Exception as err:
        capture_exception(err)
        errors.append([conf, err])

######## Permutations ################

conf_file = "/media/robbis/DATA/meg/reftep/bids.conf"

_default_options = {
    'loader__bids_band': [
        ['alphalow'],
    ],
    'loader__bids_pipeline': ['aal+connectivity', 'seed+connectivity', 'sensor+connectivity'],
    'fetch__subject_names': [['sub-003']],
Example #7
def analysis(ds, conf):
    kwargs = conf._get_kwargs()
    a = AnalysisPipeline(conf, name="c2b_01").fit(ds, **kwargs)
    a.save()
    del a
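Like the other helpers on this page, analysis() is meant to be called once per configuration; a minimal driver sketch, assuming ds, _default_options, and _default_config are defined as in the surrounding examples:

iterator = AnalysisIterator(_default_options,
                            AnalysisConfigurator,
                            config_kwargs=_default_config)
for conf in iterator:
    analysis(ds, conf)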
Example #8
                    'kwargs__cv_attr': 'subject',

                    }
 
errs = []

filtered_options = [_default_options[2]]

iterator = AnalysisIterator(filtered_options, 
                            AnalysisConfigurator(**_default_config), 
                            kind='configuration')
for conf in iterator:
    kwargs = conf._get_kwargs()
    try:
        a = AnalysisPipeline(conf, name="fingerprint").fit(ds, **kwargs)
        a.save(path="/home/carlos/fmri/carlo_mdm/0_results/")
    except Exception as err:
        errs.append([conf._default_options, err])
        capture_exception(err)


##### Results #####
from pyitab.results.base import filter_dataframe
from pyitab.results.bids import get_searchlight_results_bids
from scipy.stats import zscore
import nibabel as ni


dataframe = get_searchlight_results_bids('/media/robbis/DATA/fmri/carlo_mdm/0_results/derivatives/')
df = filter_dataframe(dataframe, id=['uv5oyc6s'], filetype=['full'])

mask_img = ni.load("/media/robbis/DATA/fmri/carlo_mdm/1_single_ROIs/mask_intersection.nii.gz")
mask = mask_img.get_fdata()  # get_data() was removed in nibabel >= 5.0
Example #9
                       'analysis': RoiDecoding,
                       'analysis__n_jobs': 5,
                       'analysis__permutation': 0,
                       'analysis__verbose': 0,
                       'kwargs__roi': ['matrix_values'],
                       'kwargs__cv_attr':'subjects',

                    }
 
 
iterator = AnalysisIterator(_default_options, AnalysisConfigurator(**_default_config))
for conf in iterator:
    kwargs = conf._get_kwargs()
    a = AnalysisPipeline(conf, name="wm_mpsi_norm_sign").fit(ds, **kwargs)
    a.save()
    del a


###############################################################
# 2019 #
conf_file = "/media/robbis/DATA/fmri/working_memory/working_memory.conf"


loader = DataLoader(configuration_file=conf_file, 
                    loader=load_mat_ds,
                    task='MPSI_NORM')

prepro = PreprocessingPipeline(nodes=[
                                      Transformer(), 
                                      #SignTransformer(),
Example #10
                       'analysis': TemporalDecoding,
                       'analysis__n_jobs': 8,
                       'analysis__permutation': 0,
                       'analysis__verbose': 0,
                       'kwargs__roi': ['matrix_values'],
                       #'kwargs__cv_attr':'subjects',

                    }
 
estimators = []
iterator = AnalysisIterator(_default_options, AnalysisConfigurator, config_kwargs=_default_config)
for conf in iterator:
    kwargs = conf._get_kwargs()
    a = AnalysisPipeline(conf, name="cross+session").fit(ds=None, **kwargs)
    a.save(save_estimator=True)
    est = a._estimator.scores['mask-matrix_values_value-1.0'][0]['estimator']
    estimators.append(est)
    del a


#########################
# Prediction timecourse #

# Plot results

# See results.py
colormap = {'LH':'navy', 'RH':'firebrick', 'LF':'cornflowerblue', 'RF':'salmon'}
colors = [colormap[t] for t in y]
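A minimal plotting sketch for the colour-coded predictions; X_emb (an n_samples x 2 embedding) and y are hypothetical names here, and the real plotting code lives in results.py:

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.scatter(X_emb[:, 0], X_emb[:, 1], c=colors)
handles = [plt.Line2D([], [], marker='o', linestyle='', color=c, label=l)
           for l, c in colormap.items()]
ax.legend(handles=handles)
plt.show()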

# Load second session