Example #1
from pyitab.io.loader import DataLoader
from pyitab.preprocessing.base import Transformer
from pyitab.preprocessing.pipelines import PreprocessingPipeline


def loading_data(img_pattern):
    # `conf_file` is expected to be defined at module level, as in the other examples.
    loader = DataLoader(configuration_file=conf_file,
                        loader='mat',
                        task='fcmri',
                        event_file=img_pattern[:-4] + ".txt",
                        img_pattern=img_pattern,
                        atlas='findlab')

    prepro = PreprocessingPipeline(nodes=[
        Transformer(),
        #Detrender(),
        #SampleZNormalizer(),
        #FeatureZNormalizer()
    ])
    #prepro = PreprocessingPipeline()

    ds = loader.fetch(prepro=prepro)

    return ds
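A hedged usage sketch, not part of the original example: collect one dataset per connectivity matrix by calling loading_data over a folder of .mat files, mirroring the glob pattern of Example #7; the directory path is a placeholder assumption.

import glob

# Placeholder directory; substitute the fcmri folder from your configuration.
matrix_list = [m.split("/")[-1] for m in glob.glob("/path/to/fcmri/*.mat")]
datasets = [loading_data(m) for m in matrix_list]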
Example #2
import numpy as np

from pyitab.io.loader import DataLoader
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.io.connectivity import load_mat_ds
from pyitab.simulation.loader import load_simulations

from pyitab.preprocessing.math import AbsoluteValueTransformer, SignTransformer
from pyitab.preprocessing.base import Transformer

from pyitab.analysis.states.base import Clustering
from sklearn import cluster, mixture
from joblib import Parallel, delayed

conf_file = "/media/robbis/DATA/fmri/working_memory/working_memory.conf"
conf_file = '/m/home/home9/97/guidotr1/unix/data/simulations/meg/simulations.conf'

loader = DataLoader(configuration_file=conf_file,
                    loader='simulations',
                    task='simulations')

ds = loader.fetch(prepro=Transformer())

_default_options = {
    'estimator': [
        [[('clf1', cluster.MiniBatchKMeans())]],
        [[('clf1', cluster.KMeans())]],
        [[('clf1', cluster.SpectralClustering())]],
        [[('clf1', cluster.AgglomerativeClustering())]],
        [[('clf5', mixture.GaussianMixture())]],
    ],
    'sample_slicer__subject': [[trial] for trial in np.unique(ds.sa.subject)],
    'estimator__clf1__n_clusters': range(2, 10),
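For reference, a minimal scikit-learn-only sketch (an assumption, not the pyitab clustering pipeline itself) of what the estimator/n_clusters grid above encodes: sweeping the number of clusters for a single estimator directly on the dataset samples.

from sklearn import cluster

# One MiniBatchKMeans fit per candidate number of clusters.
cluster_labels = {
    k: cluster.MiniBatchKMeans(n_clusters=k).fit_predict(ds.samples)
    for k in range(2, 10)
}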
Example #3
from pyitab.analysis.states.gsbs import GSBS
from pyitab.io.loader import DataLoader

from pyitab.analysis.configurator import AnalysisConfigurator
from pyitab.preprocessing import SampleSlicer, FeatureSlicer
from pyitab.analysis.roi import RoiAnalyzer
import os
import numpy as np

conf_file = "/home/robbis/mount/permut1/sherlock/bids/bids.conf"
loader = DataLoader(configuration_file=conf_file,
                    loader='bids',
                    task='preproc',
                    bids_task=['day1'])

subjects = ['marcer', 'matsim', 'simpas']
for s in subjects:

    ds = loader.fetch(subject_names=[s],
                      prepro=[SampleSlicer(trial_type=np.arange(1, 32))])

    roi_analyzer = RoiAnalyzer(analysis=GSBS())
    roi_analyzer.fit(ds, roi=['aal'], kmax=50)

    roi_analyzer.save()

################## Resting state ##########################
conf_file = path = "/home/robbis/mount/permut1/sherlock/bids/bids.conf"
loader = DataLoader(configuration_file=conf_file,
                    data_path="/home/robbis/mount/permut1/sherlock/bids/",
                    subjects='participants.tsv',
Example #4
from imblearn.over_sampling import SMOTE
import numpy as np

from pyitab.io.loader import DataLoader

from imblearn.under_sampling import *
from imblearn.over_sampling import *

conf_file = "/home/carlos/mount/megmri03/fmri/carlo_ofp/ofp.conf"
conf_file = "/media/robbis/DATA/fmri/carlo_ofp/ofp.conf"
#conf_file = "/home/carlos/fmri/carlo_ofp/ofp_new.conf"

# Enable logging only when the /home/... configuration is active
# (i.e. when running on the remote machine).
if conf_file[1] == 'h':
    from mvpa_itab.utils import enable_logging
    root = enable_logging()

loader = DataLoader(configuration_file=conf_file, task='OFP')
ds = loader.fetch()

return_ = True
ratio = 'auto'

_default_options = {
    'sample_slicer__evidence': [[1]],
    'sample_slicer__subject': [[s] for s in np.unique(ds.sa.subject)],
    'balancer__balancer': [
        AllKNN(return_indices=return_, ratio=ratio),
        CondensedNearestNeighbour(return_indices=return_, ratio=ratio),
        EditedNearestNeighbours(return_indices=return_, ratio=ratio),
        InstanceHardnessThreshold(return_indices=return_, ratio=ratio),
        NearMiss(return_indices=return_, ratio=ratio),
        OneSidedSelection(return_indices=return_, ratio=ratio),
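As a standalone reference (an assumption, not part of the snippet), the same undersampling can be applied directly to the samples and labels; note that recent imblearn releases replace return_indices/ratio with fit_resample and sampling_strategy.

from imblearn.under_sampling import AllKNN

# Assumes a pymvpa-style dataset exposing ds.samples and ds.targets.
X_res, y_res = AllKNN().fit_resample(ds.samples, ds.targets)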
Example #5
from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.configurator import AnalysisConfigurator
from pyitab.analysis.pipeline import AnalysisPipeline
from pyitab.analysis.decoding.roi_decoding import RoiDecoding
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.preprocessing.functions import Detrender, SampleSlicer, \
    TargetTransformer, Transformer
from pyitab.preprocessing.normalizers import SampleZNormalizer
from pyitab.io.loader import DataLoader

from sklearn.linear_model import LogisticRegression

import numpy as np

import warnings
warnings.filterwarnings("ignore")

conf_file = "/media/robbis/DATA/meg/reftep/bids.conf"
loader = DataLoader(configuration_file=conf_file,
                    task='reftep',
                    load_fx='reftep-conn',
                    loader='bids-meg',
                    bids_pipeline='connectivity+lv')

ds = loader.fetch(n_subjects=9)

_default_options = {
    'prepro': [
        ['sample_slicer', 'target_transformer'],
        ['sample_slicer', 'feature_znormalizer', 'target_transformer'],
        ['sample_slicer', 'sample_znormalizer', 'target_transformer'],
    ],
    'sample_slicer__subject': [[s] for s in np.unique(ds.sa.subject)],
    'estimator__fsel__k': [50, 100, 150],
    'estimator__clf': [
        LogisticRegression(penalty='l1', solver='liblinear'),
Example #6
from pyitab.analysis.decoding.roi_decoding import RoiDecoding
from pyitab.io.loader import DataLoader

from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.configurator import AnalysisConfigurator

from pyitab.analysis.pipeline import AnalysisPipeline
from sklearn.feature_selection import SelectKBest
from sklearn.model_selection import *
from sklearn.svm import SVC

from pyitab.preprocessing.functions import SampleSlicer, TargetTransformer

import numpy as np
import pickle

loader = DataLoader(
    configuration_file="/home/carlos/fmri/carlo_ofp/ofp_new.conf",
    task='OFP_NORES')
ds = loader.fetch()

decoding = RoiDecoding(n_jobs=20, scoring=['accuracy'])

results = dict()
for subject in np.unique(ds.sa.subject):
    results[subject] = []
    for evidence in [1, 2, 3]:

        pipeline = PreprocessingPipeline(nodes=[
            TargetTransformer('decision'),
            SampleSlicer(**{
                'subject': [subject],
                'evidence': [evidence]
Example #7
    return np.mean(error)




conf_file = "/media/robbis/DATA/fmri/monks/meditation.conf"

matrix_list = glob.glob("/media/robbis/DATA/fmri/monks/061102chrwoo/fcmri/*.mat")
matrix_list = [m.split("/")[-1] for m in matrix_list]


for m in matrix_list:
    # Note: m is re-assigned here, so every iteration loads the same matrix.
    m = '20151103_132009_connectivity_filtered_first_filtered_after_each_run_no_gsr_findlab_fmri.mat'
    loader = DataLoader(configuration_file=conf_file, 
                        loader='mat',
                        task='fcmri',
                        atlas='findlab',
                        event_file=m[:-4]+".txt",
                        img_pattern=m)

    prepro = PreprocessingPipeline(nodes=[
                                        #Transformer(), 
                                        #Detrender(), 
                                        SampleZNormalizer(),
                                        #FeatureZNormalizer()
                                        ])
    #prepro = PreprocessingPipeline()


    ds = loader.fetch(prepro=prepro)

    _default_options = [
Example #8
from pyitab.io.loader import DataLoader

conf_file = "/media/robbis/DATA/meg/reftep/bids.conf"

loader = DataLoader(configuration_file=conf_file,
                    task='reftep',
                    loader='bids-meg',
                    load_fx='reftep-iplv',
                    bids_pipeline='sensor+connectivity')

ds = loader.fetch()
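A small illustrative follow-up, not in the original: inspect the fetched dataset, assuming the pymvpa-style attribute collections used throughout these examples.

print(ds.shape)        # (samples, features)
print(ds.sa.keys())    # sample attributes (subject, targets, ...)
print(ds.fa.keys())    # feature attributes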
Example #9
from pyitab.preprocessing import Node
from pyitab.analysis.decoding.roi_decoding import Decoding
from pyitab.io.connectivity import load_mat_ds
from pyitab.io.loader import DataLoader
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.preprocessing.functions import Detrender
from pyitab.preprocessing.normalizers import SampleZNormalizer, FeatureZNormalizer


from pyitab.preprocessing.math import AbsoluteValueTransformer

import warnings
warnings.filterwarnings("ignore")


conf_file =  "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"


loader = DataLoader(configuration_file=conf_file, 
                    #loader=load_mat_ds,
                    task='BETA_MVPA',
                    roi_labels={'conjunction':"/media/robbis/DATA/fmri/carlo_mdm/1_single_ROIs/conjunction_map_mask.nii.gz"})

prepro = PreprocessingPipeline(nodes=[
                                      #Transformer(), 
                                      Detrender(), 
                                      SampleZNormalizer(),
                                      FeatureZNormalizer()
                                      ])
#prepro = PreprocessingPipeline()


ds = loader.fetch(prepro=prepro)
    
_default_options = {
                        'kwargs__use_partialcorr': [True, False],
Example #10
#conf_file =  "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"
conf_file = "/home/carlos/fmri/carlo_mdm/memory.conf"

roi_labels_fname = glob.glob(
    '/home/carlos/fmri/carlo_mdm/1_single_ROIs/*mask.nii.gz')
#roi_labels_fname = glob.glob('/home/robbis/mount/permut1/fmri/carlo_mdm/1_single_ROIs/*mask.nii.gz')
roi_labels_fname = glob.glob(
    '/media/robbis/DATA/fmri/carlo_mdm/1_single_ROIs/*mask.nii.gz')
roi_labels = {
    os.path.basename(fname).split('_')[0]: fname
    for fname in roi_labels_fname
}

loader = DataLoader(configuration_file=conf_file,
                    event_file='residuals_attributes_full',
                    roi_labels=roi_labels,
                    task='RESIDUALS_MVPA')

prepro = PreprocessingPipeline(nodes=[
    #Transformer(),
    #Detrender(attr='file'),
    Detrender(attr='chunks'),
    SampleZNormalizer(),
    FeatureZNormalizer(),
    SampleSlicer(frame=[1, 2, 3, 4, 5, 6, 7]),
    #TargetTransformer(attr='decision'),
    MemoryReducer(dtype=np.float16),
    #Balancer(attr='frame'),
])

ds = loader.fetch(prepro=prepro, n_subjects=8)
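A tiny follow-up sketch (an assumption, not part of the original) to verify the effect of MemoryReducer on the samples array.

print(ds.samples.dtype)               # expected: float16 after MemoryReducer
print(ds.samples.nbytes / 1e6, "MB")  # memory footprint of the samples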
Example #11
from pyitab.analysis.states.gsbs import GSBS
from pyitab.io.loader import DataLoader

from pyitab.analysis.configurator import AnalysisConfigurator
from pyitab.preprocessing import SampleSlicer, FeatureSlicer
from pyitab.preprocessing.connectivity import SpeedEstimator
import os
import numpy as np

conf_file = path = "/home/robbis/mount/permut1/sherlock/bids/bids.conf"
loader = DataLoader(configuration_file=conf_file,
                    loader='bids',
                    task='preproc',
                    bids_task=['day1'],
                    bids_run=['01', '02', '03'])

ds = loader.fetch(
    subject_names=['matsim'],
    prepro=[SampleSlicer(trial_type=np.arange(1, 32)),
            FeatureSlicer(aal=[1])])

X = ds.samples

speed = SpeedEstimator().transform(ds)
peaks = speed > np.mean(speed) + 2 * np.std(speed)
peaks_idx = np.nonzero(peaks.flatten())[0]

X_ = np.split(X, peaks_idx, axis=0)
cluster = [i * np.ones(x.shape[0]) for i, x in enumerate(X_)]
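A short follow-up, added here as an assumption: concatenate the per-segment labels into a single state-label vector aligned with X.

state_labels = np.concatenate(cluster)
assert state_labels.shape[0] == X.shape[0]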
Example #12
            SampleSlicer(selection_dictionary={'events_number': range(1, 13)})
        ]

        PreprocessingPipeline.__init__(self, nodes=self.nodes)


conf_file = "/media/robbis/DATA/fmri/monks/meditation.conf"

matrix_list = glob.glob(
    "/media/robbis/DATA/fmri/monks/061102chrwoo/fcmri/*.mat")
matrix_list = [m.split("/")[-1] for m in matrix_list]

for m in matrix_list:
    loader = DataLoader(configuration_file=conf_file,
                        loader=load_mat_ds,
                        task='fcmri',
                        event_file=m[:-4] + ".txt",
                        img_pattern=m)

    prepro = PreprocessingPipeline(nodes=[
        Transformer(),
        #Detrender(),
        SampleZNormalizer(),
        #FeatureZNormalizer()
    ])
    #prepro = PreprocessingPipeline()

    ds = loader.fetch(prepro=prepro)

    _default_options = {
        'sample_slicer__targets': [['Vipassana'], ['Samatha']],
Example #13
from pyitab.io.loader import DataLoader
from pyitab.preprocessing.functions import Transformer
from pyitab.preprocessing.normalizers import FeatureZNormalizer, \
    SampleZNormalizer, SampleSigmaNormalizer, \
    FeatureSigmaNormalizer
from pyitab.analysis.decoding.temporal_decoding import TemporalDecoding
from mne.decoding import (SlidingEstimator, GeneralizingEstimator, Scaler,
                          cross_val_multiscore, LinearModel, get_coef,
                          Vectorizer, CSP)
from sklearn.linear_model import LogisticRegression

import warnings
warnings.filterwarnings("ignore")
 
conf_file = "/media/robbis/DATA/meg/c2b/meeting-december-data/bids.conf"

loader = DataLoader(configuration_file=conf_file, 
                    loader='bids-meg',
                    bids_window='300',
                    bids_ses='01',
                    task='power')

ds = loader.fetch(subject_names=['sub-109123'], prepro=[Transformer()])
    
_default_options = {
                       
                       'loader__bids_ses': ['01', '02'],
                       
                       'sample_slicer__targets' : [
                           ['LH', 'RH'], 
                           ['LF', 'RF'], 
                           #['LH', 'RH', 'LF', 'RF']
                        ],
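For context, a minimal mne-python sketch (an assumption, not the pyitab TemporalDecoding call) of the sliding-estimator decoding the imports above point to; X is expected as (trials, channels, times) and y as the condition labels.

from mne.decoding import SlidingEstimator, cross_val_multiscore
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

clf = make_pipeline(StandardScaler(), LogisticRegression(solver='liblinear'))
time_decod = SlidingEstimator(clf, scoring='roc_auc', n_jobs=1)
# scores = cross_val_multiscore(time_decod, X, y, cv=5)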
Example #14
from pyitab.io.loader import DataLoader
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from sklearn.model_selection import *
from pyitab.analysis.searchlight import SearchLight
from pyitab.analysis.rsa import RSA
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC
from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.pipeline import AnalysisPipeline
from pyitab.analysis.configurator import AnalysisConfigurator
from pyitab.preprocessing import SampleSlicer
import os
import numpy as np

conf_file = path = "/home/robbis/mount/permut1/sherlock/bids/bids.conf"
loader = DataLoader(configuration_file=conf_file,
                    loader='bids',
                    task='preproc',
                    bids_task=['day1'])

subjects = ['marcer', 'matsim', 'simpas']
for s in subjects:
    ds = loader.fetch(subject_names=[s],
                      prepro=[SampleSlicer(trial_type=np.arange(1, 32))])

    rsa = RSA(distance='correlation')

    rsa.fit(ds, roi=['aal'])
    rsa.save()

    del ds, rsa
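For reference, a minimal scipy sketch (an assumption, not the pyitab RSA implementation) of the correlation-distance representational dissimilarity matrix that RSA(distance='correlation') refers to; inside the loop above it would have to run before the del statement.

from scipy.spatial.distance import pdist, squareform

# Pairwise correlation distances between trial patterns (rows of ds.samples).
rdm = squareform(pdist(ds.samples, metric='correlation'))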
Example #15
from pyitab.preprocessing.functions import Detrender, Transformer
from pyitab.preprocessing.normalizers import FeatureZNormalizer, \
    SampleZNormalizer, SampleSigmaNormalizer, \
    FeatureSigmaNormalizer, DatasetFxNormalizer
from pyitab.analysis.decoding.roi_decoding import RoiDecoding
from pyitab.io.loader import DataLoader
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from joblib import Parallel, delayed

import numpy as np

path = "/media/robbis/Seagate_Pt1/data/working_memory/"
conf_file = "%s/data/working_memory.conf" % (path)

task = 'PSI'
task = 'PSICORR'

loader = DataLoader(configuration_file=conf_file,
                    loader='mat',
                    task=task,
                    data_path="%s/data/" % (path),
                    subjects="%s/data/participants.csv" % (path))

prepro = PreprocessingPipeline(nodes=[
    Transformer(),
    #SampleZNormalizer()
])

ds = loader.fetch(prepro=prepro)

_default_options = {
    'sample_slicer__targets': [['0back', '2back']],
    'sample_slicer__band': [[c] for c in np.unique(ds.sa.band)],
    'estimator__fsel__k': np.arange(1, 1200, 50),
}
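A hedged scikit-learn-only sketch (an assumption, not the pyitab RoiDecoding pipeline) of the analysis the options above describe: restrict the dataset to one band and the 0back/2back conditions, then cross-validate a k-best feature selection plus linear SVM; the attribute names follow the pymvpa-style ds.sa collections used above.

from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC

band = np.unique(ds.sa.band)[0]
mask = np.logical_and(ds.sa.band == band,
                      np.isin(ds.sa.targets, ['0back', '2back']))
X, y = ds.samples[mask], ds.sa.targets[mask]

pipe = Pipeline([('fsel', SelectKBest(f_classif, k=50)),
                 ('clf', SVC(kernel='linear'))])
scores = cross_val_score(pipe, X, y, cv=5)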
Example #16
#####################################

roi_labels_fname = glob.glob(
    '/home/carlos/fmri/carlo_mdm/1_single_ROIs/*mask.nii.gz')
roi_labels = {
    os.path.basename(fname).split('_')[0]: fname
    for fname in roi_labels_fname
}

configuration_file = "/home/carlos/fmri/carlo_mdm/memory.conf"
#configuration_file = "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"

loader = DataLoader(
    configuration_file=configuration_file,
    #data_path="/home/carlos/mount/meg_workstation/Carlo_MDM/",
    task='BETA_MVPA',
    roi_labels=roi_labels,
    event_file="beta_attributes_full",
    brain_mask="mask_intersection")

prepro = PreprocessingPipeline(nodes=[
    #Transformer(),
    Detrender(),
    SampleZNormalizer(),
    FeatureZNormalizer(),
])
#prepro = PreprocessingPipeline()

ds = loader.fetch(prepro=prepro)
ds = MemoryReducer(dtype=np.float16).transform(ds)
Example #17
import warnings

from pyitab.io.loader import DataLoader
from pyitab.preprocessing.math import AbsoluteValueTransformer
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.preprocessing.functions import Detrender
from pyitab.preprocessing.normalizers import SampleZNormalizer, FeatureZNormalizer

warnings.filterwarnings("ignore")

######################################
# Only when running on permut1
from mvpa_itab.utils import enable_logging

root = enable_logging()
#####################################

conf_file = "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"

loader = DataLoader(
    configuration_file=conf_file,
    #loader=load_mat_ds,
    task='BETA_MVPA')

prepro = PreprocessingPipeline(nodes=[
    #Transformer(),
    Detrender(),
    SampleZNormalizer(),
    FeatureZNormalizer()
])
#prepro = PreprocessingPipeline()

ds = loader.fetch(prepro=prepro)

_default_options = {
    #'target_trans__target': ["decision"],
    'sample_slicer__accuracy': [[1], [0]],
Example #18
from pyitab.preprocessing.math import AbsoluteValueTransformer, SignTransformer

import warnings
warnings.filterwarnings("ignore")
 
######################################
# Only when running on permut1
from pyitab.utils import enable_logging
root = enable_logging()
#####################################

conf_file =  ""
conf_file = '/media/guidotr1/Seagate_Pt1/data/Viviana2018/meg/movie.conf' 
 
loader = DataLoader(configuration_file=conf_file,  
                    loader='mat', 
                    task='conn')


prepro = PreprocessingPipeline(nodes=[
                                      #SampleZNormalizer(),
                                      #FeatureZNormalizer(),
                                      Resampler(down=5)
                                      ])
#prepro = PreprocessingPipeline()


ds = loader.fetch(prepro=prepro)
    
_default_options = {
                    'sample_slicer__targets' : [['movie', 'rest'], 
Example #19
import warnings
warnings.filterwarnings("ignore")

from pyitab.io.loader import DataLoader
from pyitab.io.connectivity import load_mat_ds
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.preprocessing.functions import Detrender, Transformer
from pyitab.preprocessing.math import SignTransformer
 
######################################
# Only when running on permut1
from pyitab.utils import enable_logging
root = enable_logging()
#####################################

#conf_file = "/home/carlos/mount/megmri03/working_memory/working_memory_remote.conf"
conf_file =  "/media/robbis/DATA/fmri/working_memory/working_memory.conf"
conf_file = "/media/robbis/Seagate_Pt1/data/working_memory/working_memory.conf"

loader = DataLoader(configuration_file=conf_file, 
                    loader=load_mat_ds,
                    task='CONN')

prepro = PreprocessingPipeline(nodes=[
                                      Transformer(), 
                                      #SignTransformer(),
                                      Detrender(),
                                      #AbsoluteValueTransformer(),
                                      SignTransformer(),
                                      #SampleSigmaNormalizer(),
                                      #FeatureSigmaNormalizer(),
                                      ])
#prepro = PreprocessingPipeline()


ds = loader.fetch(prepro=prepro)
Example #20
from sklearn.model_selection import *
from pyitab.analysis.searchlight import SearchLight
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC
from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.pipeline import AnalysisPipeline
from pyitab.analysis.configurator import AnalysisConfigurator
import os

from pyitab.io.loader import DataLoader

from pyitab.utils import enable_logging

root = enable_logging()

conf_file = "/home/carlos/fmri/carlo_ofp/ofp_new.conf"
#conf_file = "/media/robbis/DATA/fmri/carlo_ofp/ofp.conf"
loader = DataLoader(configuration_file=conf_file, task='OFP_NORES')
ds = loader.fetch()

import numpy as np

######################## Across Memory ##################################

_default_options = {
    'target_trans__target': ["memory_status"],
}

_default_config = {
    'prepro': ['sample_slicer', 'target_trans'],
    'sample_slicer__memory_status': ['L', 'F'],
    'sample_slicer__evidence': [1],
    'target_trans__target': "memory_status",
Example #21
import warnings

from pyitab.io.loader import DataLoader
from pyitab.preprocessing.math import AbsoluteValueTransformer
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.preprocessing.functions import Detrender
from pyitab.preprocessing.normalizers import SampleZNormalizer, FeatureZNormalizer

warnings.filterwarnings("ignore")
   
######################################
# Only when running on permut1
from pyitab.utils import enable_logging
root = enable_logging()
#####################################


configuration_file = "/home/carlos/fmri/carlo_mdm/memory.conf"
#configuration_file = "/media/robbis/DATA/fmri/carlo_mdm/memory.conf"

loader = DataLoader(configuration_file=configuration_file, 
                    data_path="/home/carlos/mount/meg_workstation/Carlo_MDM/",
                    task='BETA_MVPA', 
                    event_file="full", 
                    brain_mask="mask_intersection")

prepro = PreprocessingPipeline(nodes=[
                                      #Transformer(), 
                                      Detrender(), 
                                      SampleZNormalizer(),
                                      FeatureZNormalizer(),

                                      ])
#prepro = PreprocessingPipeline()


ds = loader.fetch(prepro=prepro)
Example #22
import warnings
warnings.filterwarnings("ignore")

import numpy as np

from pyitab.io.loader import DataLoader

from pyitab.utils import enable_logging
root = enable_logging()

data_path = '/media/robbis/DATA/meg/viviana-hcp/'

conf_file = "/media/robbis/DATA/meg/viviana-hcp/bids.conf"

loader = DataLoader(
    configuration_file=conf_file,
    data_path=data_path,
    subjects="/media/robbis/DATA/meg/viviana-hcp/participants.tsv",
    loader='bids-meg',
    task='blp',
    bids_atlas="complete",
    bids_correction="corr",
    bids_derivatives='True',
    load_fx='hcp-blp')

ds = loader.fetch()
nodes = ds.fa.nodes_1
matrix = np.zeros_like(ds.samples[0])
nanmask = np.logical_not(np.isnan(ds.samples).sum(0))
ds = ds[:, nanmask]

networks = ['AN', 'CON', 'DAN', 'DMN', 'FPN', 'LN', 'MN', 'VAN', 'VFN', 'VPN']
networks = [[n] for n in networks]

kwargs_list = [{'nodes_1': v, 'nodes_2': v} for v in networks]
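A hedged numpy sketch (an assumption, not part of the original) of how one entry of kwargs_list could be used to keep only the within-network connections, assuming the dataset also carries a nodes_2 feature attribute alongside nodes_1.

sel = kwargs_list[0]
fmask = np.logical_and(np.isin(ds.fa.nodes_1, sel['nodes_1']),
                       np.isin(ds.fa.nodes_2, sel['nodes_2']))
ds_network = ds[:, fmask]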
Example #23
from sklearn.feature_selection import f_regression

import numpy as np
from pyitab.io.loader import DataLoader
import os

from sklearn.pipeline import Pipeline
from sklearn.feature_selection import SelectKBest

from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.configurator import AnalysisConfigurator
from pyitab.analysis.pipeline import AnalysisPipeline
from pyitab.analysis.decoding.regression import RoiRegression
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.preprocessing.functions import Detrender, SampleSlicer, \
    TargetTransformer, Transformer
from pyitab.preprocessing.normalizers import FeatureZNormalizer, \
    SampleZNormalizer, SampleSigmaNormalizer, \
    FeatureSigmaNormalizer

import warnings
warnings.filterwarnings("ignore")

conf_file = "/media/robbis/DATA/meg/reftep/bids.conf"
loader = DataLoader(configuration_file=conf_file,
                    task='reftep',
                    load_fx='reftep-sensor',
                    loader='bids-meg',
                    bids_space='sensor')

ds = loader.fetch(n_subjects=1)
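A hedged scikit-learn-only sketch (an assumption, not the pyitab RoiRegression pipeline) matching the imports above: k-best features scored with f_regression feeding a linear support-vector regressor; the continuous target attribute is not shown in the snippet, so ds.sa.targets is a placeholder assumption.

from sklearn.model_selection import cross_val_score
from sklearn.pipeline import Pipeline
from sklearn.svm import SVR

X, y = ds.samples, np.asarray(ds.sa.targets, dtype=float)
pipe = Pipeline([('fsel', SelectKBest(f_regression, k=100)),
                 ('reg', SVR(kernel='linear'))])
scores = cross_val_score(pipe, X, y, cv=5, scoring='r2')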