from nilearn.input_data import MultiNiftiMasker


def _uniform_masking(fmri_list, tr, high_pass=0.01, smoothing=5):
    """ Mask all the sessions uniformly, doing standardization, linear
    detrending, DCT high_pas filtering and gaussian smoothing.

    Parameters
    ----------

    fmri_list: array-like
        array containing multiple BOLD data from different sessions

    tr: float
        repetition time of the acquisition, in seconds

    high_pass: float
        frequency at which to apply the high-pass filter, defaults to 0.01

    smoothing: float
        spatial scale (FWHM) of the Gaussian smoothing filter in mm, defaults to 5

    Returns
    -------

    fmri_list_masked: array-like
        array containing the masked data

    """
    masker = MultiNiftiMasker(mask_strategy='epi',
                              standardize=True,
                              detrend=True,
                              high_pass=high_pass,
                              t_r=tr,
                              smoothing_fwhm=smoothing)
    fmri_list_masked = masker.fit_transform(fmri_list)

    return fmri_list_masked
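A minimal usage sketch, assuming three hypothetical session files and a 2 s TR:

sessions = ['sub01_run1.nii.gz', 'sub01_run2.nii.gz', 'sub01_run3.nii.gz']
masked = _uniform_masking(sessions, tr=2.0)  # list of (n_scans, n_voxels) arrays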
Example #2
import numpy as np
from nilearn.input_data import MultiNiftiMasker
from sklearn.model_selection import KFold


def split_fmri_data(subj, n_splits=10):
    subj_preprocessed_path = spenc_dir+'synthesis/clean_data/sub{0:03d}/'.format(subj)

    run_fn = [subj_preprocessed_path+'run{0:03d}.nii.gz'.format(i) for i in range(8)]

    masker = MultiNiftiMasker()
    data = np.concatenate(masker.fit_transform(run_fn), axis=0).astype('float32')
    duration = np.array([902,882,876,976,924,878,1084,676])

    # I did not kick out the first/last 4 samples per run yet
    slice_nr_per_run = [dur // 2 for dur in duration]

    # use broadcasting to get indices to delete around the borders
    idx_borders = np.cumsum(slice_nr_per_run[:-1])[:,np.newaxis] + \
                  np.arange(-4,4)[np.newaxis,:]

    data = np.delete(data, idx_borders, axis=0)

    # and we're going to remove the last fmri slice
    # since it does not correspond to a movie part anymore
    data = data[:-1, :]

    # drop the first 3 TRs
    data = data[3:]

    voxel_kfold = KFold(n_splits=n_splits)
    return [data[:, split] for _, split in voxel_kfold.split(np.arange(data.shape[1]))]
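A hedged usage sketch (the subject number is hypothetical, and spenc_dir must point at the preprocessed data):

voxel_splits = split_fmri_data(subj=1, n_splits=10)
print(len(voxel_splits), voxel_splits[0].shape)  # 10 folds over the voxel axis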
Example #4
import numpy as np
import nibabel as nib
from nilearn.input_data import MultiNiftiMasker


def _check_input_data(data, mask_img=None, return_first_element=False):
    if not isinstance(data, list):
        data = [data]

    if all(isinstance(x, nib.spatialimages.SpatialImage) for x in data):
        masker = MultiNiftiMasker(mask_img)
        data = masker.fit_transform(data)

    elif all(isinstance(x, np.ndarray) for x in data):
        pass

    else:
        raise ValueError('data must be an instance of numpy.ndarray or '
                         'nibabel.spatialimages.SpatialImage')

    # when being used for Decode -- the actual image/array is needed
    if return_first_element:
        return data[0]
    else:
        return data
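A short sketch of the two accepted input types (the array shape is hypothetical):

arr = np.random.rand(100, 2000)  # hypothetical (n_scans, n_voxels) matrix
checked = _check_input_data(arr)                           # -> [arr]
first = _check_input_data(arr, return_first_element=True)  # -> arr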
Example #5
import os

from nilearn.image import mean_img
from nilearn.input_data import MultiNiftiMasker

# We first create a masker, giving it the options that we care
# about. Here we standardize the data, as it is often important
# for decoding.
mask_filename = os.getcwd() + "/dataset/train/Patient_01/GT.nii.gz"
scan_filename = os.getcwd() + "/dataset/train/Patient_01/Patient_01.nii.gz"

masker = MultiNiftiMasker(mask_img=mask_filename, standardize=True)
print(masker)


# We give the masker a filename and retrieve a 2D array ready
# for machine learning with scikit-learn

scan_masked = masker.fit_transform(scan_filename)

# calculate mean image for the background
mean_func_img = mean_img(scan_filename)
# Optional visual check of the mask over the mean image:
# plot_roi(masker.mask_img_, mean_func_img, display_mode='y', cut_coords=4, title="Mask")
# show()

Example #6
# https://nilearn.github.io/auto_examples/02_decoding/plot_simulated_data.html
# Bayesian ridge
import os

import numpy as np
from nilearn.input_data import MultiNiftiMasker

CACHE_DIR = os.path.join('/', 'disk4t', 'mehdi',
                         'data', 'tmp')

dataset = datasets.fetch_adni_rs_fmri()  # project-specific fetcher, not part of nilearn.datasets
func_files = dataset['func']
dx_group = np.array(dataset['dx_group'])

###############################################################################
# 1- Masking
###############################################################################
masker = MultiNiftiMasker(mask_strategy='epi',
                          mask_args=dict(opening=1),
                          memory_level=2,
                          memory=CACHE_DIR,
                          n_jobs=8)
func_masked = masker.fit_transform(func_files)

###############################################################################
# 2- Testing
###############################################################################
idx = {}
for g in ['AD', 'LMCI', 'EMCI', 'Normal']:
    idx[g] = np.where(dx_group == g)

groups = [['AD', 'Normal'], ['AD', 'EMCI'], ['AD', 'LMCI'],
          ['EMCI', 'LMCI'], ['EMCI', 'Normal'], ['LMCI', 'Normal']]

for gr in groups:

    test_var = np.ones((len(func_files), 1), dtype=float)  # intercept
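    # The loop body is truncated in the source. A hypothetical continuation,
    # assuming time-averaged signals as features (an assumption, not the
    # original analysis):
    x_pair = np.vstack([func_masked[i].mean(axis=0)
                        for g in gr for i in idx[g][0]])
    y_pair = np.hstack([[g] * len(idx[g][0]) for g in gr])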
Example #7
import numpy as np
from nilearn.datasets import fetch_icbm152_2009
from nilearn.input_data import MultiNiftiMasker
from sklearn.model_selection import GroupShuffleSplit, GridSearchCV, cross_val_score
from sklearn.svm import SVC

cv = GroupShuffleSplit(n_splits=10, test_size=0.3, random_state=0)

######################################################################
# Predictive model - without reduction
# ------------------------------------

# Standard mask
icbm = fetch_icbm152_2009()
mask = icbm.mask

masker = MultiNiftiMasker(mask_img=mask,
                          target_affine=ref_img.affine,
                          target_shape=ref_img.shape,
                          n_jobs=5,
                          verbose=1)
X = masker.fit_transform(input_images)
X = np.vstack(X)
X = X_sc.fit_transform(X)  # X_sc: a feature scaler defined upstream, e.g. a StandardScaler

grid = GridSearchCV(estimator,
                    param_grid=param_grid,
                    cv=cv.split(X, y, groups),
                    verbose=1,
                    n_jobs=5)
# Prediction without reduction
grid.fit(X, y)
C = grid.best_params_['C']

svc = SVC(kernel='linear', C=C)
model_cv = GroupShuffleSplit(n_splits=20, test_size=0.3, random_state=0)
non_reduced_scores = cross_val_score(estimator=svc, X=X, y=y, groups=groups,
                                     cv=model_cv, n_jobs=5)

Example #8
"""A simple example showing how to get X from 3D structural nifti images
"""
from os.path import join
import glob
import numpy as np
from nilearn.input_data import MultiNiftiMasker

# pre-computed mask. see script compute_mask.py for details
mask = 'mask.nii.gz'
# structural images
data_dir = '/storage/store/data/pac2018_data/pac2018.zip.001_FILES'
data_paths = sorted(glob.glob(join(data_dir, '*.nii')))

# Initialize MultiNiftiMasker for list of multiple subjects
multi_masker = MultiNiftiMasker(mask_img=mask, mask_strategy='background')

# Either call fit first and then transform to get X, or use fit_transform
# directly; fit adapts the given mask image to the input images.

X = multi_masker.fit_transform(data_paths)  # outputs list of masked subjects
X = np.concatenate(X)  # (number of samples, number of features)
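With X in (n_samples, n_features) form it can go straight into scikit-learn; a minimal downstream sketch (the component count is an arbitrary choice):

from sklearn.decomposition import PCA

X_reduced = PCA(n_components=50).fit_transform(X)
print(X_reduced.shape)  # (number of samples, 50)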
Example #9
import pandas as pd
from dyneusr import DyNeuGraph

from nilearn.datasets import fetch_miyawaki2008
from nilearn.input_data import MultiNiftiMasker

from kmapper import KeplerMapper, Cover
from sklearn.manifold import TSNE
from sklearn.cluster import DBSCAN

# Fetch dataset, extract time-series from ventral temporal (VT) mask
dataset = fetch_miyawaki2008()
masker = MultiNiftiMasker(mask_img=dataset.mask,
                          detrend=True,
                          standardize=False)
X = masker.fit_transform(dataset.func[0])

# Encode labels as integers
#df = pd.read_csv(dataset.session_target[0], sep=" ")
#target, labels = pd.factorize(df.labels.values)
#y = pd.DataFrame({l:(target==i).astype(int) for i,l in enumerate(labels)})

# Generate shape graph using KeplerMapper
mapper = KeplerMapper(verbose=1)
lens = mapper.fit_transform(X, projection=TSNE(2))
graph = mapper.map(lens, X, cover=Cover(20, 0.5), clusterer=DBSCAN(eps=20.))

# Visualize the shape graph using DyNeuSR's DyNeuGraph
dG = DyNeuGraph(G=graph)
dG.visualize('output.html')
Example #10
import joblib
import numpy as np
import pandas as pd
from sklearn.decomposition import PCA
from nilearn.image import index_img
from nilearn.input_data import MultiNiftiMasker

# df (with 'bart_file', 'bart_mask', 'subject_id' columns) is built earlier in the source.
pca = PCA(n_components=1)
bart_masked = []
bart_masked_pca = []
bart_masked_id = []
for i in df['bart_file']:
    for j in df['bart_mask']:
        for k in df['subject_id']:
            if (k in j) and (k in i):
                bart_masker = MultiNiftiMasker(mask_img=j,
                                               smoothing_fwhm=4,
                                               standardize=True,
                                               detrend=True,
                                               memory='D:/cache',
                                               memory_level=1)
                x = bart_masker.fit_transform(index_img(i, slice(0, 100)))
                bart_masked_pca.append(pca.fit_transform(x))
                bart_masked_id.append(k)
# Load bart_masked_pca into the dataframe
bart_masked_pca_1 = pd.Series(bart_masked_pca, name='bart_masked_pca')
bart_masked_id_1 = pd.Series(bart_masked_id, name='subject_id')
data_masked_pca = {
    'bart_masked_pca': bart_masked_pca_1,
    'subject_id': bart_masked_id_1
}
df_masked_pca = pd.concat(data_masked_pca, axis=1)
df = df.merge(df_masked_pca, how='outer')

# %%
# Save bart numpy array
bart_masked_pca = np.asarray(bart_masked_pca)
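The snippet converts the list to an array but never writes it; a minimal save step (the file name is hypothetical):

np.save('bart_masked_pca.npy', bart_masked_pca)  # hypothetical output path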