Example #1
class DetrendingPipeline(PreprocessingPipeline):  # hypothetical name: the class statement is missing from the excerpt
    def __init__(self, **kwargs):

        self.nodes = [
            Detrender(chunks_attr='file'),   # detrend within each file/run
            Detrender(),                     # then detrend across the full dataset
            FeatureZNormalizer(),            # z-score each feature
            SampleSlicer(selection_dictionary={'events_number': range(1, 13)})  # keep events 1-12
        ]

        PreprocessingPipeline.__init__(self, nodes=self.nodes)
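
A subclass defined this way can be applied as a single preprocessing step; a minimal usage sketch (the class name above and the ds variable are assumptions, not from the source):

pipeline = DetrendingPipeline()
ds_clean = pipeline.transform(ds)  # ds: a dataset previously fetched with DataLoader.fetch()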
Example #2
def loading_data(img_pattern):
    # conf_file must be defined at module level before this function is called
    loader = DataLoader(configuration_file=conf_file,
                        loader='mat',
                        task='fcmri',
                        event_file=img_pattern[:-4] + ".txt",  # events share the .mat file's basename
                        img_pattern=img_pattern,
                        atlas='findlab')

    prepro = PreprocessingPipeline(nodes=[
        Transformer(),  # effectively a pass-through here; the normalizers below are disabled
        #Detrender(),
        #SampleZNormalizer(),
        #FeatureZNormalizer()
    ])
    #prepro = PreprocessingPipeline()

    ds = loader.fetch(prepro=prepro)

    return ds
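
loading_data relies on a module-level conf_file; a hedged usage sketch that loads every .mat matrix in a directory, mirroring the glob pattern used in Example #4 below (the paths are placeholders):

import glob

conf_file = "/path/to/fcmri.conf"  # placeholder: must be set before calling loading_data
for mat_file in glob.glob("/path/to/fcmri/*.mat"):
    ds = loading_data(mat_file.split("/")[-1])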
Example #3
import os
import pickle

import numpy as np
from imblearn.under_sampling import RandomUnderSampler

loader = DataLoader(
    configuration_file="/home/carlos/fmri/carlo_ofp/ofp_new.conf",
    task='OFP_NORES')
ds = loader.fetch()

decoding = RoiDecoding(n_jobs=20, scoring=['accuracy'])

results = dict()
for subject in np.unique(ds.sa.subject):
    results[subject] = []
    for evidence in [1, 2, 3]:

        pipeline = PreprocessingPipeline(nodes=[
            TargetTransformer('decision'),
            SampleSlicer(**{
                'subject': [subject],
                'evidence': [evidence]
            }),
            Balancer(balancer=RandomUnderSampler(return_indices=True),
                     attr='chunks'),  # return_indices was removed in imbalanced-learn >= 0.6
        ])

        ds_ = pipeline.transform(ds)

        decoding.fit(ds_, roi=['lateral_ips'])

        results[subject].append(decoding.scores)

with open(
        os.path.join(loader._data_path,
                     '0_results/lateral_ips_decoding.pickle'), 'wb') as output:
    pickle.dump(results, output)
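
The scores saved above can be restored with pickle.load; a minimal sketch (the path is copied from the dump call and assumed to be reachable):

import pickle

with open('0_results/lateral_ips_decoding.pickle', 'rb') as f:
    results = pickle.load(f)  # dict: subject -> list of per-evidence scores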
Example #4

import glob

matrix_list = glob.glob("/media/robbis/DATA/fmri/monks/061102chrwoo/fcmri/*.mat")
matrix_list = [m.split("/")[-1] for m in matrix_list]


for m in matrix_list:
    # the loop variable is immediately overwritten, so only this one file is processed
    m = '20151103_132009_connectivity_filtered_first_filtered_after_each_run_no_gsr_findlab_fmri.mat'
    loader = DataLoader(configuration_file=conf_file, 
                        loader='mat',
                        task='fcmri',
                        atlas='findlab',
                        event_file=m[:-4]+".txt",
                        img_pattern=m)

    prepro = PreprocessingPipeline(nodes=[
                                        #Transformer(), 
                                        #Detrender(), 
                                        SampleZNormalizer(),
                                        #FeatureZNormalizer()
                                        ])
    #prepro = PreprocessingPipeline()


    ds = loader.fetch(prepro=prepro)

    _default_options = [
                        {
                            'prepro':['sample_slicer', 'target_transformer'],
                            'target_transformer__attr': 'expertise',
                            'sample_slicer__targets': ['Samatha']
                        },
                        {
                            'prepro':['sample_slicer', 'target_transformer'],
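
The 'node__parameter' keys in _default_options follow the sklearn-style double-underscore convention: each key addresses one parameter of a named pipeline node. A sketch of what the first option set expands to (node classes taken from the surrounding snippets, so this is an approximation):

prepro = PreprocessingPipeline(nodes=[
    SampleSlicer(targets=['Samatha']),   # 'sample_slicer__targets'
    TargetTransformer('expertise'),      # 'target_transformer__attr'
])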
Example #5
import numpy as np
from scipy.stats import zscore

nodes = ds.fa.nodes_1
matrix = np.zeros_like(ds.samples[0])

# Keep only features that contain no NaN in any sample
nanmask = np.logical_not(np.isnan(ds.samples).sum(0))
ds = ds[:, nanmask]


# 1. Transform dataset to have mean 0 and std 1
prepro = [
    FeatureZNormalizer(),
    SampleAttributeTransformer(attr='dexterity1', fx=('zscore', zscore)),
    SampleAttributeTransformer(attr='dexterity2', fx=('zscore', zscore)),
]

ds = PreprocessingPipeline(nodes=prepro).transform(ds)


# R/patsy-style design formulas; '- 1' drops the intercept term
formulas = [
    'task + dexterity1 - 1',
    'task + dexterity2 - 1',
    'task - 1',
    'dexterity1 - 1',
    'dexterity2 - 1',
]

contrasts = [
    
       {'f+restvstask': [1, -1/6, -1/6, -1/6, -1/6, -1/6, -1/6,  0],
        'f+taskvsdext': [1/7, 1/7, 1/7, 1/7, 1/7, 1/7, 1/7,  -1],
        'f+task': [[1, -1, 0, 0, 0, 0, 0,  0], 
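
SampleAttributeTransformer above pairs an attribute name with scipy's zscore; a standalone illustration of that transform (the numbers are made up):

import numpy as np
from scipy.stats import zscore

dexterity = np.array([12.0, 15.0, 9.0, 20.0])
print(zscore(dexterity))  # rescaled to mean 0, std 1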
Example #6
import numpy as np
configuration_file = "/home/carlos/fmri/carlo_mdm/memory.conf"
#configuration_file = "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"

loader = DataLoader(
    configuration_file=configuration_file,
    #data_path="/home/carlos/mount/meg_workstation/Carlo_MDM/",
    task='BETA_MVPA',
    roi_labels=roi_labels,  # defined in code omitted from this excerpt
    event_file="beta_attributes_full",
    brain_mask="mask_intersection")

prepro = PreprocessingPipeline(nodes=[
    #Transformer(),
    Detrender(),
    SampleZNormalizer(),
    FeatureZNormalizer(),
])
#prepro = PreprocessingPipeline()

ds = loader.fetch(prepro=prepro)
ds = MemoryReducer(dtype=np.float16).transform(ds)  # cast samples to float16 to cut memory use

_default_options = [
    {
        'target_transformer__attr': "image_type",
        'sample_slicer__attr': {
            'image_type': ["I", "O"],
            'evidence': [1]
        },
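
MemoryReducer casts the sample matrix to a narrower dtype; a rough sketch of the saving for a float64 matrix (the shape is made up):

import numpy as np

samples = np.random.rand(1000, 50000)            # float64: ~400 MB
print(samples.nbytes / 1e6)                      # 400.0
print(samples.astype(np.float16).nbytes / 1e6)   # 100.0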
Example #7

import numpy as np
from joblib import Parallel, delayed

path = "/media/robbis/Seagate_Pt1/data/working_memory/"
conf_file = "%s/data/working_memory.conf" % (path)

task = 'PSI'
task = 'PSICORR'  # the second assignment wins; 'PSI' is a leftover alternative

loader = DataLoader(configuration_file=conf_file,
                    loader='mat',
                    task=task,
                    data_path="%s/data/" % (path),
                    subjects="%s/data/participants.csv" % (path))

prepro = PreprocessingPipeline(nodes=[
    Transformer(),
    #SampleZNormalizer()
])

ds = loader.fetch(prepro=prepro)

_default_options = {
    'sample_slicer__targets': [['0back', '2back']],
    'sample_slicer__band': [[c] for c in np.unique(ds.sa.band)],
    'estimator__fsel__k': np.arange(1, 1200, 50),
}

_default_config = {
    'prepro': ['sample_slicer'],
    #'ds_normalizer__ds_fx': np.std,
    'sample_slicer__band': ['gamma'],
    'sample_slicer__targets': ['0back', '2back'],
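
Parallel and delayed are imported but unused in this excerpt; a sketch of how per-band work could be parallelized with them (analyze_band is a hypothetical function, not from the source):

def analyze_band(band):
    ...  # run the analysis for a single frequency band

results = Parallel(n_jobs=4)(
    delayed(analyze_band)(b) for b in np.unique(ds.sa.band))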
Example #8
import numpy as np
from pyitab.utils import enable_logging

root = enable_logging()
#####################################

#conf_file = "/home/carlos/mount/megmri03/working_memory/working_memory_remote.conf"
#conf_file = "/media/robbis/DATA/fmri/working_memory/working_memory.conf"
conf_file = "/media/robbis/Seagate_Pt1/data/working_memory/working_memory.conf"

loader = DataLoader(configuration_file=conf_file,
                    loader=load_mat_ds,  # loader function imported in code omitted here
                    task='CONN')

prepro = PreprocessingPipeline(nodes=[
                                      Transformer(),        # effectively a pass-through
                                      #SignTransformer(),
                                      Detrender(),
                                      #AbsoluteValueTransformer(),
                                      SignTransformer(),    # presumably keeps only the sign of each value
                                      #SampleSigmaNormalizer(),
                                      #FeatureSigmaNormalizer(),
                                      ])
#prepro = PreprocessingPipeline()


ds = loader.fetch(prepro=prepro)
    
_default_options = {
                       'sample_slicer__targets' : [['0back', '2back'], ['0back', 'rest'], ['rest', '2back']],
                       #'sample_slicer__targets' : [['0back', '2back']],
                       'sample_slicer__band': [[c] for c in np.unique(ds.sa.band)],
                       #'estimator__clf__C': [0.1, 1, 10],                          
                       #'cv__n_splits': [75, 150, 200, 250],
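
Assuming SignTransformer maps each connectivity value to its sign (an inference from the name, not confirmed by the source), the core operation in plain numpy would be:

import numpy as np

samples = np.array([[0.3, -1.2, 0.0],
                    [-0.5, 2.0, -0.1]])
print(np.sign(samples))  # [[ 1. -1.  0.] [-1.  1. -1.]]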
Example #9

# Only when running on permut1
from pyitab.utils import enable_logging
root = enable_logging()
#####################################

conf_file = '/media/guidotr1/Seagate_Pt1/data/Viviana2018/meg/movie.conf'
loader = DataLoader(configuration_file=conf_file,  
                    loader='mat', 
                    task='conn')


prepro = PreprocessingPipeline(nodes=[
                                      #SampleZNormalizer(),
                                      #FeatureZNormalizer(),
                                      Resampler(down=5)   # downsample the time series by a factor of 5
                                      ])
#prepro = PreprocessingPipeline()


ds = loader.fetch(prepro=prepro)
    
_default_options = {
                    'sample_slicer__targets' : [['movie', 'rest'], 
                                                ['movie', 'scramble']],
                    'sample_slicer__band' : [['alpha'], ['beta']],
                    }    
    
_default_config = {
               
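
Resampler(down=5) presumably decimates the time series by a factor of 5; a naive numpy equivalent, ignoring any anti-aliasing the real node might apply:

import numpy as np

signal = np.arange(20.0)
print(signal[::5])  # keeps every 5th sample: [ 0.  5. 10. 15.]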
Example #10
# Only when running on permut1
from mvpa_itab.utils import enable_logging

root = enable_logging()
#####################################

conf_file = "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"

loader = DataLoader(
    configuration_file=conf_file,
    #loader=load_mat_ds,
    task='BETA_MVPA')

prepro = PreprocessingPipeline(nodes=[
    #Transformer(),
    Detrender(),
    SampleZNormalizer(),
    FeatureZNormalizer()
])
#prepro = PreprocessingPipeline()

ds = loader.fetch(prepro=prepro)

_default_options = {
    #'target_trans__target': ["decision"],
    'sample_slicer__accuracy': [[1], [0]],
}

_default_config = {
    'prepro': ['sample_slicer', 'target_transformer', 'balancer'],
    'sample_slicer__decision': ['NEW', 'OLD'],
    'sample_slicer__evidence': [1],
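
The 'balancer' step listed in _default_config relies on imbalanced-learn, as in Example #3; a standalone sketch of random undersampling (the data is made up, and fit_resample is the modern replacement for the deprecated return_indices usage):

import numpy as np
from imblearn.under_sampling import RandomUnderSampler

X = np.arange(10).reshape(-1, 1)
y = np.array([0] * 7 + [1] * 3)               # imbalanced classes
X_res, y_res = RandomUnderSampler().fit_resample(X, y)
print(np.bincount(y_res))                     # [3 3]: classes equalized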
Example #11

# (the excerpt opens part-way through a preprocessing-node list)
    SampleAttributeTransformer(attr='dexterity2', fx=('zscore', zscore)),
]

from sklearn.linear_model import LinearRegression  # needed by the regression loop below

networks = ['AN', 'CON', 'DAN', 'DMN', 'FPN', 'LN', 'MN', 'VAN', 'VFN', 'VPN']
tasks = ['task1']

betas = list()
intercepts = list()
for net in networks:
    for subj in np.unique(ds.sa.subject):
        # note: the slicers below hard-code 'MN' rather than using the loop variable net
        prepro_X = [
            SampleSlicer(subject=[subj], band=['alpha'], task=['rest']),
            FeatureSlicer(nodes_1=['MN'], nodes_2=['MN']),
            SampleZNormalizer()
        ]
        X = PreprocessingPipeline(nodes=prepro_X).transform(ds).samples.T

        prepro_y = [
            SampleSlicer(subject=[subj], band=['alpha'], task=['task1']),
            FeatureSlicer(nodes_1=['MN'], nodes_2=['MN']),
            SampleZNormalizer()
        ]

        y = PreprocessingPipeline(nodes=prepro_y).transform(ds).samples.T

        linear = LinearRegression()
        linear.fit(X, y)

        betas.append(linear.coef_.squeeze())
        intercepts.append(linear.intercept_.squeeze())
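
A follow-up sketch arranging the collected coefficients for inspection, assuming each fit yields a single scalar beta (the actual shape depends on the data):

import numpy as np

n_subjects = len(np.unique(ds.sa.subject))
beta_matrix = np.array(betas).reshape(len(networks), n_subjects)  # (network, subject)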