Example #1
def test_base_decomposition():
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create fake activation to get non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img=mask)
    base_decomposition = BaseDecomposition(mask=masker, n_components=3)
    base_decomposition.fit(data)
    assert_true(base_decomposition.mask_img_ == mask)
    assert_true(
        base_decomposition.mask_img_ == base_decomposition.masker_.mask_img_)

    # Testing fit on data
    masker = MultiNiftiMasker()
    base_decomposition = BaseDecomposition(mask=masker, n_components=3)
    base_decomposition.fit(data)
    assert_true(
        base_decomposition.mask_img_ == base_decomposition.masker_.mask_img_)

    assert_raises_regex(
        ValueError, "Object has no components_ attribute. "
        "This may be because "
        "BaseDecomposition is directly "
        "being used.", base_decomposition.transform, data)
    assert_raises_regex(
        ValueError, 'Need one or more Niimg-like objects as input, '
        'an empty list was given.', base_decomposition.fit, [])
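# For context, a minimal standalone sketch (not part of the test above) of the
# masking workflow these assertions exercise, using the same synthetic shapes:
import numpy as np
import nibabel
from nilearn.input_data import MultiNiftiMasker

rng = np.random.RandomState(0)
shape = (6, 8, 10, 5)
affine = np.eye(4)
imgs = [nibabel.Nifti1Image(rng.normal(size=shape), affine)
        for _ in range(3)]
mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)

# Fit on the mask alone, then reduce each 4D image to a
# (n_scans, n_voxels) array; one array is returned per subject.
masker = MultiNiftiMasker(mask_img=mask).fit()
data = masker.transform(imgs)
assert data[0].shape == (5, 6 * 8 * 10)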
Example #2
def create_raw_contrast_data(imgs,
                             mask,
                             raw_dir,
                             memory=Memory(cachedir=None),
                             n_jobs=1,
                             batch_size=100):
    if not os.path.exists(raw_dir):
        os.makedirs(raw_dir)

    # Selection of contrasts
    masker = MultiNiftiMasker(smoothing_fwhm=0,
                              mask_img=mask,
                              memory=memory,
                              memory_level=1,
                              n_jobs=n_jobs).fit()
    mask_img_file = os.path.join(raw_dir, 'mask_img.nii.gz')
    masker.mask_img_.to_filename(mask_img_file)

    batches = gen_batches(len(imgs), batch_size)

    data = np.empty((len(imgs), masker.mask_img_.get_data().sum()),
                    dtype=np.float32)
    for i, batch in enumerate(batches):
        print('Batch %i' % i)
        data[batch] = masker.transform(imgs['z_map'].values[batch])
    imgs = pd.DataFrame(data=data, index=imgs.index, dtype=np.float32)
    imgs.to_pickle(join(raw_dir, 'imgs.pkl'))
def run(root_dir="/", dump_dir=None, data_set=None, n_jobs=1):
    from nilearn.input_data import MultiNiftiMasker, NiftiMapsMasker
    from joblib import Memory, Parallel, delayed
    import nibabel
    from tempfile import mkdtemp

    if dump_dir is None:
        dump_dir = mkdtemp(dir=os.path.join(root_dir, 'tmp'))
    mem = Memory(cachedir=os.path.join(root_dir, dump_dir))
    print("Loading all paths and variables into memory")
    df = get_all_paths(root_dir=root_dir, data_set=data_set)
    target_affine_ = nibabel.load(df["func"][0]).get_affine()
    target_shape_ = nibabel.load(df["func"][0]).shape[:-1]
    print "preparing and running MultiNiftiMasker"
    mnm = MultiNiftiMasker(mask_strategy="epi", memory=mem, n_jobs=n_jobs,
                           verbose=10, target_affine=target_affine_,
                           target_shape=target_shape_)
    mask_img = mnm.fit(list(df["func"])).mask_img_
    print "preparing and running NiftiMapsMasker"
    nmm = NiftiMapsMasker(
        maps_img=os.path.join("/usr/share/fsl/data/atlases/HarvardOxford/",
                              "HarvardOxford-cortl-prob-2mm.nii.gz"),
        mask_img=mask_img, detrend=True, smoothing_fwhm=5, standardize=True,
        low_pass=None, high_pass=None, memory=mem, verbose=10)
    region_ts = Parallel(n_jobs=n_jobs)(
        delayed(_data_fitting)(niimg, nmm, n_hv_confounds=5)
        for niimg in list(df["func"]))
#   joblib.dump(region_ts,
#               os.path.join(dump_dir, "results.pkl"))
#   region_signals = DataFrame({"region_signals": region_ts}, index=df.index)
#   df.join(region_signals)
#   return df
    return region_ts
Example #4
def _uniform_masking(fmri_list, tr, high_pass=0.01, smoothing=5):
    """ Mask all the sessions uniformly, doing standardization, linear
    detrending, DCT high_pas filtering and gaussian smoothing.

    Parameters
    ----------

    fmri_list: array-like
        array containing multiple BOLD data from different sessions

    high_pass: float
        frequency at which to apply the high pass filter, defaults to 0.01

    smoothing: float
        spatial scale of the gaussian smoothing filter in mm, defaults to 5

    Returns
    -------

    fmri_list_masked: array-like
        array containing the masked data

    """
    masker = MultiNiftiMasker(mask_strategy='epi', standardize=True,
                              detrend=True, high_pass=high_pass,
                              t_r=tr, smoothing_fwhm=smoothing)
    fmri_list_masked = masker.fit_transform(fmri_list)

    return fmri_list_masked
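# A hypothetical call, with placeholder run paths and a 2 s TR standing in
# for real acquisition parameters:
session_files = ['run1.nii.gz', 'run2.nii.gz']  # placeholder paths
masked_runs = _uniform_masking(session_files, tr=2.0)
# each entry is a (n_scans, n_voxels) array, one per session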
Example #5
def split_fmri_data(subj, n_splits=10):
    subj_preprocessed_path = spenc_dir+'synthesis/clean_data/sub{0:03d}/'.format(subj)

    run_fn = [subj_preprocessed_path+'run{0:03d}.nii.gz'.format(i) for i in range(8)]

    masker = MultiNiftiMasker()
    data = np.concatenate(masker.fit_transform(run_fn), axis=0).astype('float32')
    duration = np.array([902,882,876,976,924,878,1084,676])

    # the first/last 4 samples per run have not been removed yet
    slice_nr_per_run = [dur // 2 for dur in duration]

    # use broadcasting to get indices to delete around the borders
    idx_borders = np.cumsum(slice_nr_per_run[:-1])[:,np.newaxis] + \
                  np.arange(-4,4)[np.newaxis,:]

    data = np.delete(data, idx_borders, axis=0)

    # and we're going to remove the last fmri slice
    # since it does not correspond to a movie part anymore
    data = data[:-1, :]

    # drop the first 3 TR samples
    data = data[3:]

    voxel_kfold = KFold(data.shape[1], n_folds=n_splits)
    return [data[:, split] for _, split in voxel_kfold]
Example #6
def generate_fmri_data_for_subject(subject):
    """
	Input : Take as input each fmri file. One file = One block
	Load all fmri data and apply a global mask mak on it. The global mask is computed using the mask from each fmri run (block). 
	Applying a global mask for a subject uniformize the data. 
	Output: Output fmri_runs for a subject, corrected using a global mask
	"""
    fmri_filenames = sorted(
        glob.glob(
            os.path.join(paths.rootpath, "fmri-data/en", "sub-%03d" % subject,
                         "func", "resample*.nii")))

    masks_filenames = sorted(
        glob.glob(
            os.path.join(paths.path2Data, "en/fmri_data/masks",
                         "sub_{}".format(subject), "resample*.pkl")))
    masks = []
    for file in masks_filenames:
        with open(file, 'rb') as f:
            mask = pickle.load(f)
            masks.append(mask)

    global_mask = math_img('img>0.5', img=mean_img(masks))
    masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
    masker.fit()
    fmri_runs = [masker.transform(f) for f in tqdm(fmri_filenames)]
    print(fmri_runs[0].shape)

    return fmri_runs
def _uniform_masking(fmri_list, tr, high_pass=0.01, smoothing=5):
    """ Mask all the sessions uniformly, doing standardization, linear
    detrending, DCT high_pas filtering and gaussian smoothing.

    Parameters
    ----------

    fmri_list: array-like
        array containing multiple BOLD data from different sessions

    high_pass: float
        frequency at which to apply the high pass filter, defaults to 0.01

    smoothing: float
        spatial scale of the gaussian smoothing filter in mm, defaults to 5

    Returns
    -------

    fmri_list_masked: array-like
        array containing the masked data

    """
    masker = MultiNiftiMasker(mask_strategy='epi',
                              standardize=True,
                              detrend=True,
                              high_pass=high_pass,
                              t_r=tr,
                              smoothing_fwhm=smoothing)
    fmri_list_masked = masker.fit_transform(fmri_list)

    return fmri_list_masked
Example #8
def glass_brain(r2_voxels, subject, current_ROI, ROI_name, name):
    """
	Input : Masked results of r2score
	Take masked data and project it again in a 3D space
	Ouput : 3D glassbrain of r2score 
	"""

    # Get one mask and fit it to the corresponding ROI
    if current_ROI != -1 and current_ROI <= 5:
        masks_ROIs_filenames = sorted(
            glob.glob(os.path.join(paths.path2Data, "en/ROIs_masks/",
                                   "*.nii")))
        ROI_mask = masks_ROIs_filenames[current_ROI]
        ROI_mask = NiftiMasker(ROI_mask, detrend=True, standardize=True)
        ROI_mask.fit()
        unmasked_data = ROI_mask.inverse_transform(r2_voxels)

    # Get masks and fit a global mask
    else:
        masks = []
        masks_filenames = sorted(
            glob.glob(
                os.path.join(paths.path2Data, "en/fmri_data/masks",
                             "sub_{}".format(subject), "resample*.pkl")))
        for file in masks_filenames:
            with open(file, 'rb') as f:
                mask = pickle.load(f)
                masks.append(mask)

        global_mask = math_img('img>0.5', img=mean_img(masks))
        masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
        masker.fit()
        unmasked_data = masker.inverse_transform(r2_voxels)

    display = plot_glass_brain(unmasked_data,
                               display_mode='lzry',
                               threshold='auto',
                               colorbar=True,
                               title='Sub_{}'.format(subject))
    if not os.path.exists(
            os.path.join(paths.path2Figures, 'glass_brain',
                         'Sub_{}'.format(subject), ROI_name)):
        os.makedirs(
            os.path.join(paths.path2Figures, 'glass_brain',
                         'Sub_{}'.format(subject), ROI_name))

    display.savefig(
        os.path.join(paths.path2Figures, 'glass_brain',
                     'Sub_{}'.format(subject), ROI_name,
                     'R_squared_test_{}.png'.format(name)))
    print(
        'Figure Path : ',
        os.path.join(paths.path2Figures, 'glass_brain',
                     'Sub_{}'.format(subject), ROI_name,
                     'R_squared_test_{}.png'.format(name)))
    display.close()
Example #9
def compute_global_masker(rootdir, subjects):
    # masks = [compute_epi_mask(glob.glob(op.join(rootdir, "fmri-data/en", "sub-%03d" % s, "func","*.nii"))) for s in subjects]
    # global_mask = math_img('img>0.5', img=mean_img(masks))
    # masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True, memory='/volatile/tmp')
    masker = MultiNiftiMasker(mask_img=op.join(
        rootdir, "lpp-scripts3/inputs/ROIs/mask_ICV.nii"),
                              detrend=True,
                              standardize=True)
    masker.fit()
    return masker
def compute_all_subjects_mask():
    """ Computes the mask of all the subjects and the sesssions
    """
    masker = MultiNiftiMasker(mask_strategy='epi', memory=CACHE_DIR,
                              memory_level=2, n_jobs=10, verbose=5)
               
    imgs = dataset.func1 + dataset.func2
    masker.fit(imgs)
    masker.mask_img_.to_filename('all_subjects.nii.gz')
    plot_roi(masker.mask_img_)
Example #11
def compute_global_masker(files):  # [[path, path2], [path3, path4]]
    # return a MultiNiftiMasker object
    masks = [compute_epi_mask(f) for f in files]
    global_mask = math_img(
        'img>0.5',
        img=mean_img(masks))  # take the average mask and threshold at 0.5
    masker = MultiNiftiMasker(
        global_mask, detrend=True, standardize=True
    )  # return an object that transforms a 4D brain into a 2D matrix of voxel-time and can do the reverse action
    masker.fit()
    return masker
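# To make the comment above concrete, a hypothetical round trip through the
# returned masker; the run paths are placeholders:
epi_runs = ['run1.nii.gz', 'run2.nii.gz']        # placeholder paths
masker = compute_global_masker([epi_runs])
signals = masker.transform(epi_runs)             # one (n_scans, n_voxels) array per run
img_back = masker.inverse_transform(signals[0])  # back to a 4D Nifti image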
def compute_global_masker(rootdir, subjects):
    masks = [
        compute_epi_mask(
            glob.glob(
                os.path.join(rootdir, "fmri-data/en", "sub-%03d" % s, "func",
                             "*.nii"))) for s in subjects
    ]
    global_mask = math_img('img>0.5', img=mean_img(masks))
    masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
    masker.fit()
    return masker
Example #13
def compute_rec():
    mask_img = fetch_mask()
    masker = MultiNiftiMasker(mask_img=mask_img).fit()
    atlas = fetch_atlas_modl()
    components_imgs = [
        atlas.positive_new_components16, atlas.positive_new_components64,
        atlas.positive_new_components512
    ]
    components = masker.transform(components_imgs)
    proj, proj_inv, rec = make_projection_matrix(components, scale_bases=True)
    dump(rec, join(get_output_dir(), 'benchmark', 'rec.pkl'))
def compute_global_masker(rootdir):
    '''Define the mask that will be applied onto data'''
    mask = op.join(rootdir, 'spm12/tpm/mask_ICV.nii')
    global_mask = math_img('img>0', img=mask)
    masker = MultiNiftiMasker(global_mask,
                              smoothing_fwhm=1.5,
                              high_pass=1 / 128,
                              t_r=2,
                              detrend=True,
                              standardize=True)
    masker.fit()
    return masker
def compute_all_subjects_mask():
    """ Computes the mask of all the subjects and the sesssions
    """
    masker = MultiNiftiMasker(mask_strategy='epi',
                              memory=CACHE_DIR,
                              memory_level=2,
                              n_jobs=10,
                              verbose=5)

    imgs = dataset.func1 + dataset.func2
    masker.fit(imgs)
    masker.mask_img_.to_filename('all_subjects.nii.gz')
    plot_roi(masker.mask_img_)
Example #16
def generate_fmri_data_for_subject(subject, current_ROI):
	"""
	Input : Take as input each fmri file. One file = One block
	Load all fmri data and apply a global mask mak on it. The global mask is computed using the mask from each fmri run (block). 
	Applying a global mask for a subject uniformize the data. 
	Output: Output fmri_runs for a subject, corrected using a global mask
	"""

	# Get all paths for fmri data
	fmri_filenames = sorted(glob.glob(os.path.join(paths.rootpath, 
												"fmri-data/en",
												"sub-%03d" % subject, 
												"func", 
												"resampled*.nii")))
	
	# Process for All brain
	if current_ROI == -1:
		# Get paths for masks
		masks_filenames = sorted(glob.glob(os.path.join(paths.path2Data,
												"en/fmri_data/masks",
												"sub_{}".format(subject),  
												"resample*.pkl")))
		masks = []
		for file in masks_filenames:
			with open(file, 'rb') as f:
				mask = pickle.load(f)
				masks.append(mask)

		# Compute a global mask across all runs of the subject so the data is uniform
		global_mask = math_img('img>0.5', img=mean_img(masks))
		masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
		masker.fit()

		# Apply the mask to each fmri run (block)
		fmri_runs = [masker.transform(f) for f in tqdm(fmri_filenames)]
	
	# Process for a specific ROI
	else:
		# get paths of ROIs masks
		masks_ROIs_filenames = sorted(glob.glob(os.path.join(paths.path2Data, 
												"en/ROIs_masks/",
												"*.nii")))
		# Choose the mask 
		ROI_mask = masks_ROIs_filenames[current_ROI]
		ROI_mask = NiftiMasker(ROI_mask, detrend=True, standardize=True)
		ROI_mask.fit()

		# Apply the mask to each fmri run (block)
		fmri_runs = [ROI_mask.transform(f) for f in fmri_filenames]
		
	return fmri_runs
Example #17
def test_masker_attributes_with_fit():
    # Test the base module through a sub-class
    data, mask_img, components, rng = _make_canica_test_data(n_subjects=3)
    # Passing mask_img
    canica = CanICA(n_components=3, mask=mask_img, random_state=0)
    canica.fit(data)
    assert_true(canica.mask_img_ == mask_img)
    assert_true(canica.mask_img_ == canica.masker_.mask_img_)
    # Passing masker
    masker = MultiNiftiMasker(mask_img=mask_img)
    canica = CanICA(n_components=3, mask=masker, random_state=0)
    canica.fit(data)
    assert_true(canica.mask_img_ == canica.masker_.mask_img_)
    canica = CanICA(mask=mask_img, n_components=3)
    assert_raises_regex(
        ValueError, "Object has no components_ attribute. "
        "This is probably because fit has not been called", canica.transform,
        data)
    # Test that an error is raised when an empty list is provided.
    assert_raises_regex(
        ValueError, 'Need one or more Niimg-like objects as input, '
        'an empty list was given.', canica.fit, [])
    # Test passing masker arguments to estimator
    canica = CanICA(n_components=3,
                    target_affine=np.eye(4),
                    target_shape=(6, 8, 10),
                    mask_strategy='background')
    canica.fit(data)
    def transform_single_imgs(self, imgs, confounds=None, copy=True,
                              mmap_mode=None):
        self._check_fitted()
        if isinstance(imgs, str):
            name, ext = os.path.splitext(imgs)
            if ext == '.npy':
                data = np.load(imgs, mmap_mode=mmap_mode)
            else:
                return MultiNiftiMasker.transform_single_imgs(self, imgs,
                                                              confounds=confounds,
                                                              copy=copy)
        elif isinstance(imgs, np.ndarray):
            data = imgs
        else:
            return MultiNiftiMasker.transform_single_imgs(self, imgs,
                                                          confounds=confounds,
                                                          copy=copy)
        assert (data.ndim == 2 and data.shape[1] == self.mask_size_)
        return data
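# Hypothetical calls covering the three dispatch branches above; `masker`
# stands for a fitted instance of this subclass, and the file names are
# placeholders:
import numpy as np

arr = masker.transform_single_imgs('sub01.npy', mmap_mode='r')  # raw array on disk
arr = masker.transform_single_imgs(np.zeros((5, 100), dtype=np.float32))  # in-memory array; second dim must equal masker.mask_size_
arr = masker.transform_single_imgs('sub01.nii.gz')  # falls back to Nifti masking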
Example #19
def compute_global_masker(files,
                          smoothing_fwhm=None
                          ):  # [[path, path2], [path3, path4]]
    """Returns a MultiNiftiMasker object from list (of list) of files.
    Arguments:
        - files: list (of list of str)
    Returns:
        - masker: MultiNiftiMasker
    """
    masks = [compute_epi_mask(f) for f in files]
    global_mask = math_img(
        'img>0.5',
        img=mean_img(masks))  # take the average mask and threshold at 0.5
    masker = MultiNiftiMasker(global_mask,
                              detrend=True,
                              standardize=True,
                              smoothing_fwhm=smoothing_fwhm)
    masker.fit()
    return masker
def test_check_embedded_nifti_masker():
    owner = OwningClass()
    masker = check_embedded_nifti_masker(owner)
    assert_true(type(masker) is MultiNiftiMasker)

    for mask, multi_subject in (
            (MultiNiftiMasker(), True), (NiftiMasker(), False)):
        owner = OwningClass(mask=mask)
        masker = check_embedded_nifti_masker(owner,
                                             multi_subject=multi_subject)
        assert_equal(type(masker), type(mask))
        for param_key in masker.get_params():
            if param_key not in ['memory', 'memory_level', 'n_jobs',
                                 'verbose']:
                assert_equal(getattr(masker, param_key),
                            getattr(mask, param_key))
            else:
                assert_equal(getattr(masker, param_key),
                            getattr(owner, param_key))

    # Check use of mask as mask_img
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert_true(masker.mask_img is mask)

    # Check attribute forwarding
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    imgs = nibabel.Nifti1Image(data, np.eye(4))
    mask = MultiNiftiMasker()
    mask.fit([[imgs]])
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert_true(masker.mask_img is mask.mask_img_)

    # Check conflict warning
    mask = NiftiMasker(mask_strategy='epi')
    owner = OwningClass(mask=mask)
    assert_warns(UserWarning, check_embedded_nifti_masker, owner)
Example #21
def _make_test_data(n_subjects=8, noisy=False):
    rng = np.random.RandomState(0)
    shape = (20, 20, 1)
    components = _make_components(shape)
    if noisy:  # Creating noisy non-positive data
        components[rng.randn(*components.shape) > .8] *= -5.
        for component in components:
            assert (component.max() <= -component.min())  # Goal met ?

    # Create a "multi-subject" dataset
    data = _make_data_from_components(components, n_subjects=n_subjects)
    affine = np.eye(4)
    mask_img = nibabel.Nifti1Image(np.ones(shape, dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img).fit()
    init = components + 1 * rng.randn(*components.shape)
    components = masker.inverse_transform(components)
    init = masker.inverse_transform(init)
    data = masker.inverse_transform(data)

    return data, mask_img, components, init
Example #22
def test_multi_pca():
    # Smoke test the MultiPCA
    # XXX: this is mostly a smoke test
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create fake activation to get non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    multi_pca = MultiPCA(mask=mask_img, n_components=3, random_state=0)

    # Test that the components are the same if we put twice the same data, and
    # that fit output is deterministic
    components1 = multi_pca.fit(data).components_
    components2 = multi_pca.fit(data).components_
    components3 = multi_pca.fit(2 * data).components_
    np.testing.assert_array_equal(components1, components2)
    np.testing.assert_array_almost_equal(components1, components3)

    # Smoke test fit with 'confounds' argument
    confounds = [np.arange(10).reshape(5, 2)] * 8
    multi_pca.fit(data, confounds=confounds)

    # Smoke test that multi_pca also works with single subject data
    multi_pca.fit(data[0])

    # Check that asking for too few components raises a ValueError
    multi_pca = MultiPCA()
    assert_raises(ValueError, multi_pca.fit, data[:2])

    # Smoke test the use of a masker, without CCA
    multi_pca = MultiPCA(mask=MultiNiftiMasker(mask_args=dict(opening=0)),
                         do_cca=False,
                         n_components=3)
    multi_pca.fit(data[:2])

    # Smoke test the transform and inverse_transform
    multi_pca.inverse_transform(multi_pca.transform(data[-2:]))

    # Smoke test fitting with no image
    assert_raises(TypeError, multi_pca.fit)

    multi_pca = MultiPCA(mask=mask_img, n_components=3)
    assert_raises_regex(
        ValueError, "Object has no components_ attribute. "
        "This is probably because fit has not been called",
        multi_pca.transform, data)
Example #23
def _check_input_data(data, mask_img=None, return_first_element=False):
    if not isinstance(data, list):
        data = [data]

    if all(isinstance(x, nib.spatialimages.SpatialImage) for x in data):
        masker = MultiNiftiMasker(mask_img)
        data = masker.fit_transform(data)

    elif all(isinstance(x, np.ndarray) for x in data):
        pass

    else:
        raise ValueError('input_data must be an instance of numpy.ndarray or '
                         'nibabel.spatialimages.SpatialImage')

    # when being used for Decode -- the actual image/array is needed
    if return_first_element:
        return data[0]
    else:
        return data
def compute_global_masker(files):  # [[path, path2], [path3, path4]]
    # return a MultiNiftiMasker object

    #spm_dir = '/neurospin/unicog/protocols/IRMf/Meyniel_MarkovGuess_2014'
    #mask = join(spm_dir, 'spm12/tpm/mask_ICV.nii')
    #global_mask = math_img('img>0', img=mask)
    #masker = MultiNiftiMasker(mask_img=global_mask)
    #masker.fit()

    masks = [compute_epi_mask(f) for f in files]
    global_mask = math_img(
        'img>0.5',
        img=mean_img(masks))  # take the average mask and threshold at 0.5
    masker = MultiNiftiMasker(
        global_mask,
        detrend=params.pref.detrend,
        standardize=params.pref.standardize
    )  # return a object that transforms a 4D barin into a 2D matrix of voxel-time and can do the reverse action
    masker.fit()
    return masker
Example #25
def test_mask_reducer():
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    imgs = []
    for i in range(8):
        this_img = rng.normal(size=shape)
        # Create fake activation to get non-empty mask
        this_img[2:4, 2:4, 2:4, :] += 10
        imgs.append(nibabel.Nifti1Image(this_img, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img=mask_img).fit()

    # Test fit on multiple images
    data = mask_and_reduce(masker, imgs)
    assert_equal(data.shape, (8 * 5, 6 * 8 * 10))

    data = mask_and_reduce(masker, imgs, n_components=3)
    assert_equal(data.shape, (8 * 3, 6 * 8 * 10))

    data = mask_and_reduce(masker, imgs, reduction_ratio=0.4)
    assert_equal(data.shape, (8 * 2, 6 * 8 * 10))

    # Test on a single image
    data_single = mask_and_reduce(masker, imgs[0], n_components=3)
    assert_true(data_single.shape == (3, 6 * 8 * 10))

    # Test n_jobs > 1
    data = mask_and_reduce(masker,
                           imgs[0],
                           n_components=3,
                           n_jobs=2,
                           random_state=0)
    assert_equal(data.shape, (3, 6 * 8 * 10))
    assert_array_almost_equal(data_single, data)

    # Test that reduced data is orthogonal
    data = mask_and_reduce(masker, imgs[0], n_components=3, random_state=0)
    assert_true(data.shape == (3, 6 * 8 * 10))
    cov = data.dot(data.T)
    cov_diag = np.zeros((3, 3))
    for i in range(3):
        cov_diag[i, i] = cov[i, i]
    assert_array_almost_equal(cov, cov_diag)

    # Test reproducibility
    data1 = mask_and_reduce(masker, imgs[0], n_components=3, random_state=0)
    data2 = mask_and_reduce(masker, [imgs[0]] * 2,
                            n_components=3,
                            random_state=0)
    assert_array_almost_equal(np.tile(data1, (2, 1)), data2)
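# A hedged reading of the shapes asserted above: mask_and_reduce stacks one
# reduced block of time points per subject, so the row counts follow directly.
n_subjects, n_scans = 8, 5
assert n_subjects * 3 == 24                    # n_components=3 rows per subject block
assert n_subjects * int(0.4 * n_scans) == 16   # reduction_ratio=0.4 keeps 2 of 5 scans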
    def __init__(self, mask_img=None, smoothing_fwhm=None,
                 standardize=False, detrend=False,
                 low_pass=None, high_pass=None, t_r=None,
                 target_affine=None, target_shape=None,
                 mask_strategy='background', mask_args=None,
                 memory=Memory(cachedir=None), memory_level=0,
                 n_jobs=1, verbose=0):
        # Mask is provided or computed
        MultiNiftiMasker.__init__(self, mask_img=mask_img, n_jobs=n_jobs,
                                  smoothing_fwhm=smoothing_fwhm,
                                  standardize=standardize, detrend=detrend,
                                  low_pass=low_pass,
                                  high_pass=high_pass, t_r=t_r,
                                  target_affine=target_affine,
                                  target_shape=target_shape,
                                  mask_strategy=mask_strategy,
                                  mask_args=mask_args,
                                  memory=memory,
                                  memory_level=memory_level,
                                  verbose=verbose)
    def transform_imgs(self, imgs_list, confounds=None, copy=True, n_jobs=1,
                       mmap_mode=None):
        """Prepare multi subject data in parallel

        Parameters
        ----------

        imgs_list: list of Niimg-like objects
            See http://nilearn.github.io/manipulating_images/input_output.html.
            List of imgs file to prepare. One item per subject.

        confounds: list of confounds, optional
            List of confounds (2D arrays or filenames pointing to CSV
            files). Must be of the same length as imgs_list.

        copy: boolean, optional
            If True, guarantees that output array has no memory in common with
            input array.

        n_jobs: integer, optional
            The number of cpus to use to do the computation. -1 means
            'all cpus'.

        Returns
        -------
        region_signals: list of 2D numpy.ndarray
            List of signals, one array per subject.
            shape: list of (number of scans, number of elements)
        """
        self._check_fitted()
        raw = True
        # Check whether all imgs from imgs_list are numpy instances; otherwise
        # fall back to MultiNiftiMasker (a hybrid imgs_list could be handled,
        # but we do not need it for the moment)
        for imgs in imgs_list:
            if isinstance(imgs, str):
                name, ext = os.path.splitext(imgs)
                if ext != '.npy':
                    raw = False
                    break
            elif not isinstance(imgs, np.ndarray):
                raw = False
                break
        if raw:
            data = Parallel(n_jobs=n_jobs)(delayed(np.load)(imgs,
                                                            mmap_mode=mmap_mode)
                                           for imgs in imgs_list)
            return data
        else:
            return MultiNiftiMasker.transform_imgs(self, imgs_list,
                                                   confounds=confounds,
                                                   copy=copy,
                                                   n_jobs=n_jobs)
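# Hypothetical usage of the raw fast path above; 'sub01.npy' / 'sub02.npy'
# are placeholder files, each assumed to hold a pre-masked
# (n_scans, n_voxels) array, and `masker` is a fitted instance of the
# subclass sketched above:
signals = masker.transform_imgs(['sub01.npy', 'sub02.npy'],
                                n_jobs=2, mmap_mode='r')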
Example #28
def fetch_masker(masker_path,
                 language,
                 path_to_fmridata,
                 path_to_input,
                 smoothing_fwhm=None,
                 logger=None):
    """ Fetch or compute if needed a global masker from all subjects of a
    given language.
    Arguments:
        - masker_path: str
        - language: str
        - path_to_input: str
        - path_to_fmridata: str
        - smoothing_fwhm: int
        - logger: Logger
    """
    if os.path.exists(masker_path + '.nii.gz') and os.path.exists(masker_path +
                                                                  '.yml'):
        logger.report_state(" loading existing masker...")
        params = read_yaml(masker_path + '.yml')
        mask_img = nib.load(masker_path + '.nii.gz')
        masker = MultiNiftiMasker()
        masker.set_params(**params)
        masker.fit([mask_img])
    else:
        logger.report_state(" recomputing masker...")
        fmri_runs = {}
        subjects = [
            get_subject_name(id) for id in possible_subjects_id(language)
        ]
        for subject in subjects:
            _, fmri_paths = fetch_data(path_to_fmridata, path_to_input,
                                       subject, language)
            fmri_runs[subject] = fmri_paths
        masker = compute_global_masker(list(fmri_runs.values()),
                                       smoothing_fwhm=smoothing_fwhm)
        params = masker.get_params()
        params = {
            key: params[key]
            for key in [
                'detrend', 'dtype', 'high_pass', 'low_pass', 'mask_strategy',
                'memory_level', 'n_jobs', 'smoothing_fwhm', 'standardize',
                't_r', 'verbose'
            ]
        }
        nib.save(masker.mask_img_, masker_path + '.nii.gz')
        save_yaml(params, masker_path + '.yml')
    return masker
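# A hypothetical invocation with placeholder paths; the only visible
# requirement on `logger` is that it exposes a report_state method, so a
# minimal stand-in is used here:
class SimpleLogger:
    def report_state(self, message):
        print(message)

masker = fetch_masker('derivatives/masker', 'english',
                      '/path/to/fmridata', '/path/to/inputs',
                      smoothing_fwhm=5, logger=SimpleLogger())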
Example #29
    def __init__(self, masker=MultiNiftiMasker(),
                 output_dir=tempfile.gettempdir(),
                 glm_model='ols', contrast_type='t', output_z=True,
                 output_stat=False, output_effects=False,
                 output_variance=False, memory=Memory(cachedir=None),
                 target_affine=None, target_shape=None,
                 model_tol=1e10,
                 n_jobs=1):
        self.masker = masker
        self.output_dir = output_dir
        self.glm_model = glm_model
        self.contrast_type = contrast_type
        self.output_z = output_z
        self.output_stat = output_stat
        self.output_effects = output_effects
        self.output_variance = output_variance
        self.target_affine = target_affine
        self.target_shape = target_shape
        self.model_tol = model_tol
        self.memory = memory
        self.n_jobs = n_jobs
Example #30
    def __init__(self,
                 data_dir,
                 study_id,
                 model_id,
                 masker=None,
                 hrf_model='canonical with derivative',
                 drift_model='cosine',
                 glm_model='ar1',
                 contrast_type='t',
                 output_z=True,
                 output_stat=False,
                 output_effects=False,
                 output_variance=False,
                 merge_tasks=False,
                 resample=False,
                 target_affine=None,
                 target_shape=None,
                 memory=Memory(cachedir=None),
                 n_jobs=1):
        self.data_dir = data_dir
        self.study_id = study_id
        self.model_id = model_id
        if masker is None:
            self.masker = MultiNiftiMasker()
        else:
            self.masker = masker
        self.hrf_model = hrf_model
        self.drift_model = drift_model
        self.glm_model = glm_model
        self.contrast_type = contrast_type
        self.output_z = output_z
        self.output_stat = output_stat
        self.output_effects = output_effects
        self.output_variance = output_variance
        self.merge_tasks = merge_tasks
        self.resample = resample
        self.target_affine = target_affine
        self.target_shape = target_shape
        self.memory = memory
        self.n_jobs = n_jobs
Example #31
def test_multi_pca():
    # Smoke test the MultiPCA
    # XXX: this is mostly a smoke test
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create fake activation to get non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    multi_pca = MultiPCA(mask=mask_img, n_components=3)

    # Test that the components are the same if we put twice the same data
    components1 = multi_pca.fit(data).components_
    components2 = multi_pca.fit(2 * data).components_
    np.testing.assert_array_almost_equal(components1, components2)

    # Smoke test that multi_pca also works with single subject data
    multi_pca.fit(data[0])

    # Check that asking for too few components raises a ValueError
    multi_pca = MultiPCA()
    nose.tools.assert_raises(ValueError, multi_pca.fit, data[:2])

    # Smoke test the use of a masker, without CCA
    multi_pca = MultiPCA(mask=MultiNiftiMasker(mask_opening=0),
                         do_cca=False,
                         n_components=3)
    multi_pca.fit(data[:2])

    # Smoke test the transform and inverse_transform
    multi_pca.inverse_transform(multi_pca.transform(data[-2:]))
Example #32
def test_check_embedded_nifti_masker():
    owner = OwningClass()
    masker = check_embedded_nifti_masker(owner)
    assert type(masker) is MultiNiftiMasker

    for mask, multi_subject in ((MultiNiftiMasker(), True), (NiftiMasker(),
                                                             False)):
        owner = OwningClass(mask=mask)
        masker = check_embedded_nifti_masker(owner,
                                             multi_subject=multi_subject)
        assert type(masker) == type(mask)
        for param_key in masker.get_params():
            if param_key not in [
                    'memory', 'memory_level', 'n_jobs', 'verbose'
            ]:
                assert (getattr(masker, param_key) == getattr(mask, param_key))
            else:
                assert (getattr(masker,
                                param_key) == getattr(owner, param_key))

    # Check use of mask as mask_img
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert masker.mask_img is mask

    # Check attribute forwarding
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    imgs = nibabel.Nifti1Image(data, np.eye(4))
    mask = MultiNiftiMasker()
    mask.fit([[imgs]])
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert masker.mask_img is mask.mask_img_

    # Check conflict warning
    mask = NiftiMasker(mask_strategy='epi')
    owner = OwningClass(mask=mask)
    with pytest.warns(UserWarning):
        check_embedded_nifti_masker(owner)
y_shape = (10, 10)

sys.stderr.write(" Done (%.2fs).\n" % (time.time() - t0))

############################################################################
# Then we prepare and mask the data
# ----------------------------------
import numpy as np
from nilearn.input_data import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data
masker = MultiNiftiMasker(mask_img=miyawaki_dataset.mask,
                          detrend=True,
                          standardize=False)
masker.fit()
X_train = masker.transform(X_random_filenames)
X_test = masker.transform(X_figure_filenames)

# We load the visual stimuli from csv files
y_train = []
for y in y_random_filenames:
    y_train.append(
        np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                   (-1, ) + y_shape,
                   order='F'))

y_test = []
for y in y_figure_filenames:
    y_test.append(
        np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                   (-1, ) + y_shape,
                   order='F'))
def create_raw_rest_data(imgs_list,
                         root,
                         raw_dir,
                         masker_params=None,
                         n_jobs=1,
                         mock=False,
                         memory=Memory(cachedir=None),
                         overwrite=False):
    """

    Parameters
    ----------
    memory
    imgs_list: DataFrame with columns filename, confounds
    root
    raw_dir
    masker_params
    n_jobs
    mock

    Returns
    -------

    """
    if masker_params is None:
        masker_params = {}
    masker = MultiNiftiMasker(verbose=1,
                              memory=memory,
                              memory_level=1,
                              **masker_params)
    if masker.mask_img is None:
        masker.fit(imgs_list['filename'])
    else:
        masker.fit()

    if 'confounds' in imgs_list.columns:
        confounds = imgs_list['confounds']
        imgs_list = imgs_list.rename(columns={'confounds': 'orig_confounds'})
    else:
        confounds = repeat(None)

    if not os.path.exists(raw_dir):
        os.makedirs(raw_dir)
    filenames = Parallel(n_jobs=n_jobs)(delayed(_unmask_single_img)(
        masker, imgs, confounds, root, raw_dir, mock=mock, overwrite=overwrite)
                                        for imgs, confounds in zip(
                                            imgs_list['filename'], confounds))
    imgs_list = imgs_list.rename(columns={'filename': 'orig_filename'})
    imgs_list = imgs_list.assign(filename=filenames)
    imgs_list = imgs_list.assign(confounds=None)
    if not mock:
        imgs_list.to_csv(os.path.join(raw_dir, 'data.csv'), mode='w+')
        mask_img_file = os.path.join(raw_dir, 'mask_img.nii.gz')
        masker.mask_img_.to_filename(mask_img_file)
        params = masker.get_params()
        params.pop('memory')
        params.pop('memory_level')
        params.pop('n_jobs')
        params.pop('verbose')
        params['mask_img'] = mask_img_file
        json.dump(params, open(os.path.join(raw_dir, 'masker.json'), 'w+'))
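# A hypothetical counterpart to the masker.json dump above, rebuilding a
# fitted masker from the saved parameters (the raw_dir path is a placeholder):
import json
import os

from nilearn.input_data import MultiNiftiMasker

raw_dir = '/path/to/raw_dir'  # placeholder
with open(os.path.join(raw_dir, 'masker.json')) as f:
    params = json.load(f)
# 'mask_img' in params points at the mask_img.nii.gz written above,
# so fit() only needs to load it.
masker = MultiNiftiMasker(**params).fit()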
    def fit(self, niimgs=None, y=None, confounds=None, connectivity=None):
        """Compute the mask and the components

        Parameters
        ----------
        niimgs: list of filenames or NiImages
            Data on which the PCA must be calculated. If this is a list,
            the affine is considered the same for all.
        """
        # Hack to support single-subject data:
        if isinstance(niimgs, (str, nibabel.Nifti1Image)):
            niimgs = [niimgs]
            # This is a very incomplete hack, as it won't work right for
            # single-subject list of 3D filenames
        # First, learn the mask
        if not isinstance(self.mask, MultiNiftiMasker):
            self.masker_ = MultiNiftiMasker(mask=self.mask,
                                            smoothing_fwhm=self.smoothing_fwhm,
                                            target_affine=self.target_affine,
                                            target_shape=self.target_shape,
                                            low_pass=self.low_pass,
                                            high_pass=self.high_pass,
                                            t_r=self.t_r,
                                            memory=self.memory,
                                            memory_level=self.memory_level)
        else:
            try:
                self.masker_ = clone(self.mask)
            except TypeError as e:
                # Workaround for a joblib bug: in joblib 0.6, a Memory object
                # with cachedir = None cannot be cloned.
                masker_memory = self.mask.memory
                if masker_memory.cachedir is None:
                    self.mask.memory = None
                    self.masker_ = clone(self.mask)
                    self.mask.memory = masker_memory
                    self.masker_.memory = Memory(cachedir=None)
                else:
                    # The error was raised for another reason
                    raise e

            for param_name in ['target_affine', 'target_shape',
                               'smoothing_fwhm', 'low_pass', 'high_pass',
                               't_r', 'memory', 'memory_level']:
                if getattr(self.masker_, param_name) is not None:
                    warnings.warn('Parameter %s of the masker overridden'
                                  % param_name)
                setattr(self.masker_, param_name,
                        getattr(self, param_name))
        if self.masker_.mask is None:
            self.masker_.fit(niimgs)
        else:
            self.masker_.fit()
        self.mask_img_ = self.masker_.mask_img_

        parameters = get_params(MultiNiftiMasker, self)

        # Now compute the covariances

        self.covariances_ = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)(
            delayed(subject_covariance)(
                self.estimator,
                niimg,
                self.masker_.mask_img_,
                parameters,
                memory=self.memory,
                ref_memory_level=self.memory_level,
                confounds=confounds,
                connectivity=connectivity,
                verbose=self.verbose
            )
            for niimg in niimgs)
        return self
Example #36
def compute_rec():
    mask_img = fetch_mask()
    masker = MultiNiftiMasker(mask_img=mask_img).fit()
    atlas = fetch_atlas_modl()
    components_imgs = [
        atlas.positive_new_components16, atlas.positive_new_components64,
        atlas.positive_new_components512
    ]
    components = masker.transform(components_imgs)
    proj, proj_inv, rec = make_projection_matrix(components, scale_bases=True)
    dump(rec, join(get_output_dir(), 'benchmark', 'rec.pkl'))


def load_rec():
    return load(join(get_output_dir(), 'benchmark', 'rec.pkl'))


# compute_rec()

exp_dirs = join(get_output_dir(), 'single_exp', '8')
models = []
rec = load_rec()
mask_img = fetch_mask()
masker = MultiNiftiMasker(mask_img=mask_img).fit()

for exp_dir in [exp_dirs]:
    estimator = load(join(exp_dir, 'estimator.pkl'))
    transformer = load(join(exp_dir, 'transformer.pkl'))
    print([(dataset, this_class)
           for dataset, lbin in transformer.lbins_.items()
           for this_class in lbin.classes_])
    coef = estimator.coef_
    coef_rec = coef.dot(rec)
    print(join(exp_dir, 'maps.nii.gz'))
    imgs = masker.inverse_transform(coef_rec)
    imgs.to_filename(join(exp_dir, 'maps.nii.gz'))
plot_stat_map(index_img(imgs, 10))
plt.show()
Example #37
### Preprocess ################################################################
import nibabel
import os
import pylab as pl
import numpy as np
import time

# CanICA
from nilearn.decomposition.canica import CanICA
from nilearn.input_data import MultiNiftiMasker

# Masker:

masker = MultiNiftiMasker(smoothing_fwhm=6.,
                          memory="nilearn_cache",
                          memory_level=1)
masker.fit(dataset.func)
affine = masker.mask_img_.get_affine()

if not os.path.exists('canica.nii.gz'):
    t0 = time.time()
    canica = CanICA(n_components=n_components, mask=masker,
                    smoothing_fwhm=6.,
                    memory="nilearn_cache", memory_level=1,
                    threshold=None,
                    random_state=1)
    canica.fit(dataset.func)
    print('Canica: %f' % (time.time() - t0))
    canica_components = masker.inverse_transform(canica.components_)
    nibabel.save(canica_components, 'canica.nii.gz')
    import joblib
    from sklearn.base import clone
    import nibabel

    root_dir = "/media/Elements/volatile/new/salma"

    mem = Memory(cachedir=os.path.join(root_dir,
                                       ("storage/workspace/brainpedia"
                                        "/preproc/henson2010faces/dump/")))
    print("Loading all paths and variables into memory")
    df = get_all_paths(root_dir=root_dir,
                       data_set=["henson2010faces"])
    target_affine_ = nibabel.load(df["func"][0]).get_affine()
    target_shape_ = nibabel.load(df["func"][0]).shape[:-1]
    print "preparing and running MultiNiftiMasker"
    mnm = MultiNiftiMasker(mask_strategy="epi", memory=mem, n_jobs=1,
                           verbose=10, target_affine=target_affine_,
                           target_shape=target_shape_)
    mask_img = mnm.fit(list(df["func"])).mask_img_
    print "preparing and running NiftiMapsMasker"
    nmm = NiftiMapsMasker(
        maps_img=os.path.join("/usr/share/fsl/data/atlases/HarvardOxford/",
                              "HarvardOxford-cortl-prob-2mm.nii.gz"),
        mask_img=mask_img, detrend=True, smoothing_fwhm=5, standardize=True,
        low_pass=None, high_pass=None, memory=mem, verbose=10)
    region_ts = [clone(nmm).fit_transform(niimg, n_hv_confounds=5)
                 for niimg in list(df["func"])]
    joblib.dump(region_ts, "/home/storage/workspace/rphlypo/retreat/results/")
    region_signals = DataFrame({"region_signals": region_ts}, index=df.index)
    df.join(region_signals)
X_figure_filenames = miyawaki_dataset.func[:12]
y_random_filenames = miyawaki_dataset.label[12:]
y_figure_filenames = miyawaki_dataset.label[:12]
y_shape = (10, 10)

sys.stderr.write(" Done (%.2fs).\n" % (time.time() - t0))

### Preprocess and mask #######################################################
import numpy as np
from nilearn.input_data import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data
masker = MultiNiftiMasker(mask_img=miyawaki_dataset.mask, detrend=True,
                          standardize=False)
masker.fit()
X_train = masker.transform(X_random_filenames)
X_test = masker.transform(X_figure_filenames)

# Load visual stimuli from csv files
y_train = []
for y in y_random_filenames:
    y_train.append(np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                              (-1,) + y_shape, order='F'))

y_test = []
for y in y_figure_filenames:
    y_test.append(np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                             (-1,) + y_shape, order='F'))
##############################################################################
# We only use the training data of this study,
# where random binary images were shown.

# training data starts after the first 12 files
fmri_random_runs_filenames = dataset.func[12:]
stimuli_random_runs_filenames = dataset.label[12:]

##############################################################################
# We can use :func:`nilearn.input_data.MultiNiftiMasker` to load the fMRI
# data, clean and mask it.

import numpy as np
from nilearn.input_data import MultiNiftiMasker

masker = MultiNiftiMasker(mask_img=dataset.mask, detrend=True,
                          standardize=True)
masker.fit()
fmri_data = masker.transform(fmri_random_runs_filenames)

# shape of the binary (i.e. black and white values) image in pixels
stimulus_shape = (10, 10)

# We load the visual stimuli from csv files
stimuli = []
for stimulus_run in stimuli_random_runs_filenames:
    stimuli.append(np.reshape(np.loadtxt(stimulus_run,
                              dtype=int, delimiter=','),
                              (-1,) + stimulus_shape, order='F'))

##############################################################################
# Let's take a look at some of these binary images:
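# The plotting code is cut off in this listing; a minimal matplotlib sketch
# of what displaying one of these binary stimuli could look like (assumes
# `stimuli` from the block above):
import matplotlib.pyplot as plt

plt.figure(figsize=(2, 2))
plt.imshow(stimuli[0][0], cmap='gray', interpolation='nearest')
plt.axis('off')
plt.show()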
X_figure = dataset.func[:12]
y_random = dataset.label[12:]
y_figure = dataset.label[:12]
y_shape = (10, 10)

sys.stderr.write(" Done (%.2fs).\n" % (time.time() - t0))

### Preprocess and mask #######################################################
import numpy as np
from nilearn.input_data import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data
masker = MultiNiftiMasker(mask_img=dataset.mask, detrend=True,
                          standardize=False)
masker.fit()
X_train = masker.transform(X_random)
X_test = masker.transform(X_figure)

# Load visual stimuli from csv files
y_train = []
for y in y_random:
    y_train.append(np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                              (-1,) + y_shape, order='F'))

y_test = []
for y in y_figure:
    y_test.append(np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                             (-1,) + y_shape, order='F'))