# Exemplo n.º 1  (snippet-site separator from the scrape; "0" was the vote count)
# 0
##################################################
# Review
from pyitab.preprocessing.pipelines import PreprocessingPipeline
from pyitab.analysis.iterator import AnalysisIterator
from pyitab.analysis.configurator import AnalysisConfigurator

# Load the OFP_NORES task from the carlo_ofp dataset; the preprocessing
# pipeline (detrending + sample/feature z-normalization) is applied at
# fetch time.
loader = DataLoader(
    configuration_file="/home/carlos/fmri/carlo_ofp/ofp_new.conf",
    task='OFP_NORES')

preprocessing_nodes = [
    Detrender(),
    Detrender(chunks_attr='file'),
    SampleZNormalizer(),
    FeatureZNormalizer(),
]
prepro = PreprocessingPipeline(nodes=preprocessing_nodes)

ds = loader.fetch(prepro=prepro)

_default_options = {
    'kwargs__roi': [['within_conjunction']],
    #'sample_slicer__subject': [[s] for s in],
    'sample_slicer__evidence': [[1], [2], [3]],
}

# Base configuration shared by every combination of _default_options.
# NOTE(review): this dict literal is truncated in the scraped snippet —
# the closing brace (and possibly further keys) of Exemplo 1 are missing.
_default_config = {
    'prepro': ['target_transformer', 'sample_slicer', 'balancer'],
    'target_transformer__attr': 'decision',
    # keep only 'L'/'F' decision trials
    'sample_slicer__decision': ['L', 'F'],
    # balance classes within each subject
    'balancer__attr': 'subject',
# Exemplo n.º 2  (snippet-site separator from the scrape; "0" was the vote count)
# 0
    # NOTE(review): the opening of this call is missing from the scraped
    # snippet — these are the trailing keyword arguments of a loader
    # constructor (presumably DataLoader(...); confirm against the source).
    bids_atlas="complete",
    bids_correction="corr",
    bids_derivatives='True',  # NOTE(review): string 'True', not bool — verify this is intended for BIDS filtering
    load_fx='hcp-blp')

ds = loader.fetch()
# Node labels plus an all-zeros accumulator shaped like a single sample.
nodes = ds.fa.nodes_1

matrix = np.zeros_like(ds.samples[0])

# Keep only the features that contain no NaN in any sample.
nan_counts = np.isnan(ds.samples).sum(0)
nanmask = np.logical_not(nan_counts)
ds = ds[:, nanmask]

# Restrict to the five conditions of interest, z-normalize features, then
# z-score both dexterity attributes.
prepro = [SampleSlicer(task=['rest', 'task1', 'task2', 'task4', 'task5'])]
prepro.append(FeatureZNormalizer())
for attribute in ('dexterity1', 'dexterity2'):
    prepro.append(
        SampleAttributeTransformer(attr=attribute, fx=('zscore', zscore)))

ds = PreprocessingPipeline(nodes=prepro).transform(ds)

# Frequency bands and task conditions crossed in the loop below.
bands = ['alpha', 'betahigh', 'betalow']
tasks = ['rest', 'task1', 'task2', 'task4', 'task5']

# Accumulator for per-(band, task) results.
dataframe = {}

# Slice the dataset per (band, task) pair and re-normalize features.
# NOTE(review): the loop body is truncated in the scraped snippet — ds_ is
# presumably aggregated into `dataframe` in the missing lines; confirm.
for b, t in product(bands, tasks):
    prepro = [SampleSlicer(task=[t], band=[b]), FeatureZNormalizer()]
    ds_ = PreprocessingPipeline(nodes=prepro).transform(ds)
# Exemplo n.º 3  (snippet-site separator from the scrape; "0" was the vote count)
# 0
                    # NOTE(review): the opening of this call is missing from
                    # the scraped snippet — trailing kwargs of a loader
                    # constructor (same pattern as Exemplo 2); confirm.
                    bids_derivatives='True',
                    load_fx='hcp-blp')


ds = loader.fetch()

nodes = ds.fa.nodes_1
# Zero accumulator with the shape of one sample.
matrix = np.zeros_like(ds.samples[0])

# Drop every feature that has at least one NaN across samples.
has_nan = np.isnan(ds.samples).sum(0)
nanmask = np.logical_not(has_nan)
ds = ds[:, nanmask]


# 1. Bring every feature to mean 0 / std 1, then z-score the two
#    dexterity attributes.
prepro = [FeatureZNormalizer()]
for attr_name in ('dexterity1', 'dexterity2'):
    prepro.append(
        SampleAttributeTransformer(attr=attr_name, fx=('zscore', zscore)))

ds = PreprocessingPipeline(nodes=prepro).transform(ds)


# Patsy-style design formulas; the trailing '- 1' drops the intercept term.
_rhs_terms = [
    'task + dexterity1',
    'task + dexterity2',
    'task',
    'dexterity1',
    'dexterity2',
]
formulas = [term + ' - 1' for term in _rhs_terms]
# Exemplo n.º 4  (snippet-site separator from the scrape; "0" was the vote count)
# 0
# Map each ROI name (first '_'-separated token of the basename) to its
# label-file path.
roi_labels = {
    os.path.basename(path).split('_')[0]: path
    for path in roi_labels_fname
}

# Loader for the residuals MVPA task, with per-ROI label files attached.
loader = DataLoader(
    configuration_file=conf_file,
    event_file='residuals_attributes_full',
    roi_labels=roi_labels,
    task='RESIDUALS_MVPA')

# Detrend per chunk, z-normalize samples and features, keep frames 1-7,
# and shrink the memory footprint to float16.
pipeline_nodes = [
    Detrender(attr='chunks'),
    SampleZNormalizer(),
    FeatureZNormalizer(),
    SampleSlicer(frame=[1, 2, 3, 4, 5, 6, 7]),
    MemoryReducer(dtype=np.float16),
]
prepro = PreprocessingPipeline(nodes=pipeline_nodes)

ds = loader.fetch(prepro=prepro, n_subjects=8)

ds = MemoryReducer(dtype=np.float16).transform(ds)

# Every ROI name except the last one.
labels = list(roi_labels.keys())[:-1]

import sentry_sdk

# Initialize Sentry error reporting for this script.
# NOTE(review): the DSN (including its key) is hard-coded in source —
# prefer loading it from an environment variable or a config file kept
# out of version control.
sentry_sdk.init("https://[email protected]/1439199")