Example #1
def test_base_decomposition():
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create fake activation to get a non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img=mask)
    base_decomposition = BaseDecomposition(mask=masker, n_components=3)
    base_decomposition.fit(data)
    assert_true(base_decomposition.mask_img_ == mask)
    assert_true(
        base_decomposition.mask_img_ == base_decomposition.masker_.mask_img_)

    # Testing fit on data
    masker = MultiNiftiMasker()
    base_decomposition = BaseDecomposition(mask=masker, n_components=3)
    base_decomposition.fit(data)
    assert_true(
        base_decomposition.mask_img_ == base_decomposition.masker_.mask_img_)

    assert_raises_regex(
        ValueError, "Object has no components_ attribute. "
        "This may be because "
        "BaseDecomposition is directly "
        "being used.", base_decomposition.transform, data)
    assert_raises_regex(
        ValueError, 'Need one or more Niimg-like objects as input, '
        'an empty list was given.', base_decomposition.fit, [])
Example #2
def _uniform_masking(fmri_list, tr, high_pass=0.01, smoothing=5):
    """ Mask all the sessions uniformly, doing standardization, linear
    detrending, DCT high_pas filtering and gaussian smoothing.

    Parameters
    ----------

    fmri_list: array-like
        array containing multiple BOLD data from different sessions

    high_pass: float
        frequency at which to apply the high pass filter, defaults to 0.01

    smoothing: float
        spatial scale of the gaussian smoothing filter in mm, defaults to 5

    Returns
    -------

    fmri_list_masked: array-like
        array containing the masked data

    """
    masker = MultiNiftiMasker(mask_strategy='epi',
                              standardize=True,
                              detrend=True,
                              high_pass=high_pass,
                              t_r=tr,
                              smoothing_fwhm=smoothing)
    fmri_list_masked = masker.fit_transform(fmri_list)

    return fmri_list_masked
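A minimal, self-contained way to exercise _uniform_masking above (a sketch, assuming numpy, nibabel and the nilearn imports used on this page are available). The fake activation block keeps the EPI-based mask non-empty, mirroring the synthetic test data used elsewhere on this page:

import nibabel
import numpy as np

rng = np.random.RandomState(0)
sessions = []
for _ in range(2):
    vol = rng.normal(size=(12, 13, 14, 8))
    # Fake activation so that mask_strategy='epi' finds a non-empty mask
    vol[3:10, 3:10, 3:10, :] += 50
    sessions.append(nibabel.Nifti1Image(vol, np.eye(4)))

masked = _uniform_masking(sessions, tr=2.0)
print([m.shape for m in masked])  # one (n_scans, n_voxels) array per session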
Example #3
def create_raw_contrast_data(imgs,
                             mask,
                             raw_dir,
                             memory=Memory(cachedir=None),
                             n_jobs=1,
                             batch_size=100):
    if not os.path.exists(raw_dir):
        os.makedirs(raw_dir)

    # Selection of contrasts
    masker = MultiNiftiMasker(smoothing_fwhm=0,
                              mask_img=mask,
                              memory=memory,
                              memory_level=1,
                              n_jobs=n_jobs).fit()
    mask_img_file = os.path.join(raw_dir, 'mask_img.nii.gz')
    masker.mask_img_.to_filename(mask_img_file)

    batches = gen_batches(len(imgs), batch_size)

    data = np.empty((len(imgs), int(masker.mask_img_.get_fdata().sum())),
                    dtype=np.float32)
                    dtype=np.float32)
    for i, batch in enumerate(batches):
        print('Batch %i' % i)
        data[batch] = masker.transform(imgs['z_map'].values[batch])
    imgs = pd.DataFrame(data=data, index=imgs.index, dtype=np.float32)
    imgs.to_pickle(os.path.join(raw_dir, 'imgs.pkl'))
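gen_batches (from sklearn.utils) yields successive slice objects, which is what makes the data[batch] = ... assignment above work; a quick illustration:

from sklearn.utils import gen_batches

print(list(gen_batches(7, 3)))
# [slice(0, 3, None), slice(3, 6, None), slice(6, 7, None)]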
Example #4
def generate_fmri_data_for_subject(subject):
    """
	Input : Take as input each fmri file. One file = One block
	Load all fmri data and apply a global mask mak on it. The global mask is computed using the mask from each fmri run (block). 
	Applying a global mask for a subject uniformize the data. 
	Output: Output fmri_runs for a subject, corrected using a global mask
	"""
    fmri_filenames = sorted(
        glob.glob(
            os.path.join(paths.rootpath, "fmri-data/en", "sub-%03d" % subject,
                         "func", "resample*.nii")))

    masks_filenames = sorted(
        glob.glob(
            os.path.join(paths.path2Data, "en/fmri_data/masks",
                         "sub_{}".format(subject), "resample*.pkl")))
    masks = []
    for file in masks_filenames:
        with open(file, 'rb') as f:
            mask = pickle.load(f)
            masks.append(mask)

    global_mask = math_img('img>0.5', img=mean_img(masks))
    masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
    masker.fit()
    fmri_runs = [masker.transform(f) for f in tqdm(fmri_filenames)]
    print(fmri_runs[0].shape)

    return fmri_runs
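The math_img('img>0.5', img=mean_img(masks)) idiom above is a majority vote across the binary run masks: a voxel is kept only if it lies inside more than half of them. A self-contained sketch with two toy masks (the shapes and values here are illustrative):

import nibabel
import numpy as np
from nilearn.image import math_img, mean_img

affine = np.eye(4)
m1 = np.zeros((4, 4, 4)); m1[:2] = 1   # mask of run 1
m2 = np.zeros((4, 4, 4)); m2[1:3] = 1  # mask of run 2
masks = [nibabel.Nifti1Image(m, affine) for m in (m1, m2)]

# Only voxels present in more than half of the run masks survive
global_mask = math_img('img > 0.5', img=mean_img(masks))
print(global_mask.get_fdata().sum())  # 16.0: only the overlapping plane remains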
Example #5
def test_masker_attributes_with_fit():
    # Test base module at sub-class
    data, mask_img, components, rng = _make_canica_test_data(n_subjects=3)
    # Passing mask_img
    canica = CanICA(n_components=3, mask=mask_img, random_state=0)
    canica.fit(data)
    assert_true(canica.mask_img_ == mask_img)
    assert_true(canica.mask_img_ == canica.masker_.mask_img_)
    # Passing masker
    masker = MultiNiftiMasker(mask_img=mask_img)
    canica = CanICA(n_components=3, mask=masker, random_state=0)
    canica.fit(data)
    assert_true(canica.mask_img_ == canica.masker_.mask_img_)
    canica = CanICA(mask=mask_img, n_components=3)
    assert_raises_regex(
        ValueError, "Object has no components_ attribute. "
        "This is probably because fit has not been called", canica.transform,
        data)
    # Test that an error is raised when an empty list is provided.
    assert_raises_regex(
        ValueError, 'Need one or more Niimg-like objects as input, '
        'an empty list was given.', canica.fit, [])
    # Test passing masker arguments to estimator
    canica = CanICA(n_components=3,
                    target_affine=np.eye(4),
                    target_shape=(6, 8, 10),
                    mask_strategy='background')
    canica.fit(data)
Example #6
def glass_brain(r2_voxels, subject, current_ROI, ROI_name, name):
    """
	Input : Masked results of r2score
	Take masked data and project it again in a 3D space
	Ouput : 3D glassbrain of r2score 
	"""

    # Get one mask and fit it to the corresponding ROI
    if current_ROI != -1 and current_ROI <= 5:
        masks_ROIs_filenames = sorted(
            glob.glob(os.path.join(paths.path2Data, "en/ROIs_masks/",
                                   "*.nii")))
        ROI_mask = masks_ROIs_filenames[current_ROI]
        ROI_mask = NiftiMasker(ROI_mask, detrend=True, standardize=True)
        ROI_mask.fit()
        unmasked_data = ROI_mask.inverse_transform(r2_voxels)

    # Get masks and fit a global mask
    else:
        masks = []
        masks_filenames = sorted(
            glob.glob(
                os.path.join(paths.path2Data, "en/fmri_data/masks",
                             "sub_{}".format(subject), "resample*.pkl")))
        for file in masks_filenames:
            with open(file, 'rb') as f:
                mask = pickle.load(f)
                masks.append(mask)

        global_mask = math_img('img>0.5', img=mean_img(masks))
        masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
        masker.fit()
        unmasked_data = masker.inverse_transform(r2_voxels)

    display = plot_glass_brain(unmasked_data,
                               display_mode='lzry',
                               threshold='auto',
                               colorbar=True,
                               title='Sub_{}'.format(subject))
    fig_dir = os.path.join(paths.path2Figures, 'glass_brain',
                           'Sub_{}'.format(subject), ROI_name)
    if not os.path.exists(fig_dir):
        os.makedirs(fig_dir)

    fig_path = os.path.join(fig_dir, 'R_squared_test_{}.png'.format(name))
    display.savefig(fig_path)
    print('Figure Path : ', fig_path)
    display.close()
Example #7
def compute_global_masker(rootdir, subjects):
    # masks = [compute_epi_mask(glob.glob(op.join(rootdir, "fmri-data/en", "sub-%03d" % s, "func","*.nii"))) for s in subjects]
    # global_mask = math_img('img>0.5', img=mean_img(masks))
    # masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True, memory='/volatile/tmp')
    mask_img = op.join(rootdir, "lpp-scripts3/inputs/ROIs/mask_ICV.nii")
    masker = MultiNiftiMasker(mask_img=mask_img, detrend=True,
                              standardize=True)
    masker.fit()
    return masker
Example #8
def test_check_embedded_nifti_masker():
    owner = OwningClass()
    masker = check_embedded_nifti_masker(owner)
    assert type(masker) is MultiNiftiMasker

    for mask, multi_subject in ((MultiNiftiMasker(), True), (NiftiMasker(),
                                                             False)):
        owner = OwningClass(mask=mask)
        masker = check_embedded_nifti_masker(owner,
                                             multi_subject=multi_subject)
        assert type(masker) == type(mask)
        for param_key in masker.get_params():
            if param_key not in [
                    'memory', 'memory_level', 'n_jobs', 'verbose'
            ]:
                assert (getattr(masker, param_key) == getattr(mask, param_key))
            else:
                assert (getattr(masker,
                                param_key) == getattr(owner, param_key))

    # Check use of mask as mask_img
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert masker.mask_img is mask

    # Check attribute forwarding
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    imgs = nibabel.Nifti1Image(data, np.eye(4))
    mask = MultiNiftiMasker()
    mask.fit([[imgs]])
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert masker.mask_img is mask.mask_img_

    # Check conflict warning
    mask = NiftiMasker(mask_strategy='epi')
    owner = OwningClass(mask=mask)
    with pytest.warns(UserWarning):
        check_embedded_nifti_masker(owner)
Example #9
def compute_global_masker(files):  # [[path, path2], [path3, path4]]
    # return a MultiNiftiMasker object
    masks = [compute_epi_mask(f) for f in files]
    global_mask = math_img(
        'img>0.5',
        img=mean_img(masks))  # take the average mask and threshold at 0.5
    masker = MultiNiftiMasker(
        global_mask, detrend=True, standardize=True
    )  # returns an object that maps a 4D brain image to a 2D (time x voxels) matrix and back
    masker.fit()
    return masker
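As the comment above says, a fitted masker maps 4D images to (n_scans, n_voxels) matrices and back; a minimal round trip on synthetic data (the mask is hand-made rather than EPI-based to keep the sketch self-contained):

import nibabel
import numpy as np
from nilearn.input_data import MultiNiftiMasker

affine = np.eye(4)
img = nibabel.Nifti1Image(
    np.random.RandomState(0).normal(size=(6, 8, 10, 5)), affine)
mask = nibabel.Nifti1Image(np.ones((6, 8, 10), dtype=np.int8), affine)

masker = MultiNiftiMasker(mask_img=mask).fit()
X = masker.transform([img])            # list with one (5, 480) array
back = masker.inverse_transform(X[0])  # 4D image again
print(X[0].shape, back.shape)          # (5, 480) (6, 8, 10, 5)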
Example #10
def test_mask_reducer():
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    imgs = []
    for i in range(8):
        this_img = rng.normal(size=shape)
        # Create fake activation to get a non-empty mask
        this_img[2:4, 2:4, 2:4, :] += 10
        imgs.append(nibabel.Nifti1Image(this_img, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img=mask_img).fit()

    # Test fit on multiple images
    data = mask_and_reduce(masker, imgs)
    assert_equal(data.shape, (8 * 5, 6 * 8 * 10))

    data = mask_and_reduce(masker, imgs, n_components=3)
    assert_equal(data.shape, (8 * 3, 6 * 8 * 10))

    data = mask_and_reduce(masker, imgs, reduction_ratio=0.4)
    assert_equal(data.shape, (8 * 2, 6 * 8 * 10))

    # Test on single image
    data_single = mask_and_reduce(masker, imgs[0], n_components=3)
    assert_true(data_single.shape == (3, 6 * 8 * 10))

    # Test n_jobs > 1
    data = mask_and_reduce(masker,
                           imgs[0],
                           n_components=3,
                           n_jobs=2,
                           random_state=0)
    assert_equal(data.shape, (3, 6 * 8 * 10))
    assert_array_almost_equal(data_single, data)

    # Test that reduced data is orthogonal
    data = mask_and_reduce(masker, imgs[0], n_components=3, random_state=0)
    assert_true(data.shape == (3, 6 * 8 * 10))
    cov = data.dot(data.T)
    cov_diag = np.diag(np.diag(cov))
    assert_array_almost_equal(cov, cov_diag)

    # Test reproducibility
    data1 = mask_and_reduce(masker, imgs[0], n_components=3, random_state=0)
    data2 = mask_and_reduce(masker, [imgs[0]] * 2,
                            n_components=3,
                            random_state=0)
    assert_array_almost_equal(np.tile(data1, (2, 1)), data2)
Example #11
def test_multi_pca():
    # Smoke test the MultiPCA
    # XXX: this is mostly a smoke test
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create fake activation to get a non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    multi_pca = MultiPCA(mask=mask_img, n_components=3, random_state=0)

    # Test that the components are the same if we put twice the same data, and
    # that fit output is deterministic
    components1 = multi_pca.fit(data).components_
    components2 = multi_pca.fit(data).components_
    components3 = multi_pca.fit(2 * data).components_
    np.testing.assert_array_equal(components1, components2)
    np.testing.assert_array_almost_equal(components1, components3)

    # Smoke test fit with 'confounds' argument
    confounds = [np.arange(10).reshape(5, 2)] * 8
    multi_pca.fit(data, confounds=confounds)

    # Smoke test that multi_pca also works with single subject data
    multi_pca.fit(data[0])

    # Check that asking for too few components raises a ValueError
    multi_pca = MultiPCA()
    assert_raises(ValueError, multi_pca.fit, data[:2])

    # Smoke test the use of a masker and without CCA
    multi_pca = MultiPCA(mask=MultiNiftiMasker(mask_args=dict(opening=0)),
                         do_cca=False,
                         n_components=3)
    multi_pca.fit(data[:2])

    # Smoke test the transform and inverse_transform
    multi_pca.inverse_transform(multi_pca.transform(data[-2:]))

    # Smoke test to fit with no img
    assert_raises(TypeError, multi_pca.fit)

    multi_pca = MultiPCA(mask=mask_img, n_components=3)
    assert_raises_regex(
        ValueError, "Object has no components_ attribute. "
        "This is probably because fit has not been called",
        multi_pca.transform, data)
Example #12
def compute_global_masker(rootdir, subjects):
    masks = [
        compute_epi_mask(
            glob.glob(
                os.path.join(rootdir, "fmri-data/en", "sub-%03d" % s, "func",
                             "*.nii"))) for s in subjects
    ]
    global_mask = math_img('img>0.5', img=mean_img(masks))
    masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
    masker.fit()
    return masker
Example #13
def compute_rec():
    mask_img = fetch_mask()
    masker = MultiNiftiMasker(mask_img=mask_img).fit()
    atlas = fetch_atlas_modl()
    components_imgs = [
        atlas.positive_new_components16, atlas.positive_new_components64,
        atlas.positive_new_components512
    ]
    components = masker.transform(components_imgs)
    proj, proj_inv, rec = make_projection_matrix(components, scale_bases=True)
    dump(rec, join(get_output_dir(), 'benchmark', 'rec.pkl'))
Example #14
def compute_global_masker(rootdir):
    '''Define the mask that will be applied onto the data'''
    mask = op.join(rootdir, 'spm12/tpm/mask_ICV.nii')
    global_mask = math_img('img>0', img=mask)
    masker = MultiNiftiMasker(global_mask,
                              smoothing_fwhm=1.5,
                              high_pass=1 / 128,
                              t_r=2,
                              detrend=True,
                              standardize=True)
    masker.fit()
    return masker
Example #15
def compute_all_subjects_mask():
    """ Computes the mask of all the subjects and the sessions
    """
    masker = MultiNiftiMasker(mask_strategy='epi',
                              memory=CACHE_DIR,
                              memory_level=2,
                              n_jobs=10,
                              verbose=5)

    imgs = dataset.func1 + dataset.func2
    masker.fit(imgs)
    masker.mask_img_.to_filename('all_subjects.nii.gz')
    plot_roi(masker.mask_img_)
Example #16
def generate_fmri_data_for_subject(subject, current_ROI):
	"""
	Input : Take as input each fmri file. One file = One block
	Load all fmri data and apply a global mask mak on it. The global mask is computed using the mask from each fmri run (block). 
	Applying a global mask for a subject uniformize the data. 
	Output: Output fmri_runs for a subject, corrected using a global mask
	"""

	# Get all paths for fmri data
	fmri_filenames = sorted(glob.glob(os.path.join(paths.rootpath, 
												"fmri-data/en",
												"sub-%03d" % subject, 
												"func", 
												"resampled*.nii")))
	
	# Process for All brain
	if current_ROI == -1:
		# Get paths for masks
		masks_filenames = sorted(glob.glob(os.path.join(paths.path2Data,
												"en/fmri_data/masks",
												"sub_{}".format(subject),  
												"resample*.pkl")))
		masks = []
		for file in masks_filenames:
			with open(file, 'rb') as f:
				mask = pickle.load(f)
				masks.append(mask)

		# Compute a global mask across all runs of the subject so the data is uniform
		global_mask = math_img('img>0.5', img=mean_img(masks))
		masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
		masker.fit()

		# Apply the mask to each fmri run (block)
		fmri_runs = [masker.transform(f) for f in tqdm(fmri_filenames)]
	
	# Process for a specific ROI
	else:
		# get paths of ROIs masks
		masks_ROIs_filenames = sorted(glob.glob(os.path.join(paths.path2Data, 
												"en/ROIs_masks/",
												"*.nii")))
		# Choose the mask 
		ROI_mask = masks_ROIs_filenames[current_ROI]
		ROI_mask = NiftiMasker(ROI_mask, detrend=True, standardize=True)
		ROI_mask.fit()

		# Apply the mask to each fmri run (block)
		fmri_runs = [ROI_mask.transform(f) for f in fmri_filenames]
		
	return fmri_runs
Example #17
def fetch_masker(masker_path,
                 language,
                 path_to_fmridata,
                 path_to_input,
                 smoothing_fwhm=None,
                 logger=None):
    """ Fetch or compute if needed a global masker from all subjects of a
    given language.
    Arguments:
        - masker_path: str
        - language: str
        - path_to_input: str
        - path_to_fmridata: str
        - smoothing_fwhm: int
        - logger: Logger
    """
    if os.path.exists(masker_path + '.nii.gz') and os.path.exists(masker_path +
                                                                  '.yml'):
        logger.report_state(" loading existing masker...")
        params = read_yaml(masker_path + '.yml')
        mask_img = nib.load(masker_path + '.nii.gz')
        masker = MultiNiftiMasker()
        masker.set_params(**params)
        masker.fit([mask_img])
    else:
        logger.report_state(" recomputing masker...")
        fmri_runs = {}
        subjects = [
            get_subject_name(id) for id in possible_subjects_id(language)
        ]
        for subject in subjects:
            _, fmri_paths = fetch_data(path_to_fmridata, path_to_input,
                                       subject, language)
            fmri_runs[subject] = fmri_paths
        masker = compute_global_masker(list(fmri_runs.values()),
                                       smoothing_fwhm=smoothing_fwhm)
        params = masker.get_params()
        params = {
            key: params[key]
            for key in [
                'detrend', 'dtype', 'high_pass', 'low_pass', 'mask_strategy',
                'memory_level', 'n_jobs', 'smoothing_fwhm', 'standardize',
                't_r', 'verbose'
            ]
        }
        nib.save(masker.mask_img_, masker_path + '.nii.gz')
        save_yaml(params, masker_path + '.yml')
    return masker
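The save/load logic above relies on the scikit-learn parameter API: get_params() returns the constructor arguments and set_params(**params) restores them, so a masker can be rebuilt from a saved mask image plus a small parameter file. A sketch of that round trip, using plain json in place of the project-specific save_yaml/read_yaml helpers:

import json
from nilearn.input_data import MultiNiftiMasker

masker = MultiNiftiMasker(detrend=True, standardize=True, t_r=2.0)
params = {k: v for k, v in masker.get_params().items()
          if k in ('detrend', 'standardize', 't_r', 'smoothing_fwhm')}
blob = json.dumps(params)                # stand-in for save_yaml

restored = MultiNiftiMasker()
restored.set_params(**json.loads(blob))  # stand-in for read_yaml
assert restored.get_params()['t_r'] == 2.0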
Example #18
def compute_global_masker(files,
                          smoothing_fwhm=None
                          ):  # [[path, path2], [path3, path4]]
    """Returns a MultiNiftiMasker object from list (of list) of files.
    Arguments:
        - files: list (of list of str)
    Returns:
        - masker: MultiNiftiMasker
    """
    masks = [compute_epi_mask(f) for f in files]
    global_mask = math_img(
        'img>0.5',
        img=mean_img(masks))  # take the average mask and threshold at 0.5
    masker = MultiNiftiMasker(global_mask,
                              detrend=True,
                              standardize=True,
                              smoothing_fwhm=smoothing_fwhm)
    masker.fit()
    return masker
Example #19
def _make_test_data(n_subjects=8, noisy=False):
    rng = np.random.RandomState(0)
    shape = (20, 20, 1)
    components = _make_components(shape)
    if noisy:  # Creating noisy, non-positive data
        components[rng.randn(*components.shape) > .8] *= -5.
        for component in components:
            assert (component.max() <= -component.min())  # Goal met ?

    # Create a "multi-subject" dataset
    data = _make_data_from_components(components, n_subjects=n_subjects)
    affine = np.eye(4)
    mask_img = nibabel.Nifti1Image(np.ones(shape, dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img).fit()
    init = components + 1 * rng.randn(*components.shape)
    components = masker.inverse_transform(components)
    init = masker.inverse_transform(init)
    data = masker.inverse_transform(data)

    return data, mask_img, components, init
Example #20
def _check_input_data(data, mask_img=None, return_first_element=False):
    if not isinstance(data, list):
        data = [data]

    if all(isinstance(x, nib.spatialimages.SpatialImage) for x in data):
        masker = MultiNiftiMasker(mask_img)
        data = masker.fit_transform(data)

    elif all(isinstance(x, np.ndarray) for x in data):
        pass

    else:
        raise ValueError('input_data must be an instance of numpy.ndarray or '
                         'nibabel.spatialimages.SpatialImage')

    # when being used for Decode -- the actual image/array is needed
    if return_first_element:
        return data[0]
    else:
        return data
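A quick demonstration of the dispatch above: Nifti inputs are masked into arrays, plain arrays pass through, and anything else raises (a sketch, assuming numpy as np, nibabel as nib and MultiNiftiMasker are imported as the helper requires; with mask_img=None the masker computes a background mask itself, so the toy image needs a zero background):

data = np.zeros((8, 8, 8, 3))
data[2:6, 2:6, 2:6, :] = np.random.rand(4, 4, 4, 3) + 1
img = nib.Nifti1Image(data, np.eye(4))

arrays = _check_input_data(img)               # masked into a (3, n_voxels) array
passed = _check_input_data(np.zeros((3, 5)))  # ndarray passes through unchanged
print(arrays[0].shape, passed[0].shape)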
Example #21
def compute_global_masker(files):  # [[path, path2], [path3, path4]]
    # returns a fitted MultiNiftiMasker object

    #spm_dir = '/neurospin/unicog/protocols/IRMf/Meyniel_MarkovGuess_2014'
    #mask = join(spm_dir, 'spm12/tpm/mask_ICV.nii')
    #global_mask = math_img('img>0', img=mask)
    #masker = MultiNiftiMasker(mask_img=global_mask)
    #masker.fit()

    masks = [compute_epi_mask(f) for f in files]
    global_mask = math_img(
        'img>0.5',
        img=mean_img(masks))  # take the average mask and threshold at 0.5
    masker = MultiNiftiMasker(
        global_mask,
        detrend=params.pref.detrend,
        standardize=params.pref.standardize
    )  # returns an object that maps a 4D brain image to a 2D (time x voxels) matrix and back
    masker.fit()
    return masker
Example #22
 def __init__(self, masker=MultiNiftiMasker(),
              output_dir=tempfile.gettempdir(),
              glm_model='ols', contrast_type='t', output_z=True,
              output_stat=False, output_effects=False,
              output_variance=False, memory=Memory(cachedir=None),
              target_affine=None, target_shape=None,
              model_tol=1e10,
              n_jobs=1):
     self.masker = masker
     self.output_dir = output_dir
     self.glm_model = glm_model
     self.contrast_type = contrast_type
     self.output_z = output_z
     self.output_stat = output_stat
     self.output_effects = output_effects
     self.output_variance = output_variance
     self.target_affine = target_affine
     self.target_shape = target_shape
     self.model_tol = model_tol
     self.memory = memory
     self.n_jobs = n_jobs
Example #23
 def __init__(self,
              data_dir,
              study_id,
              model_id,
              masker=None,
              hrf_model='canonical with derivative',
              drift_model='cosine',
              glm_model='ar1',
              contrast_type='t',
              output_z=True,
              output_stat=False,
              output_effects=False,
              output_variance=False,
              merge_tasks=False,
              resample=False,
              target_affine=None,
              target_shape=None,
              memory=Memory(cachedir=None),
              n_jobs=1):
     self.data_dir = data_dir
     self.study_id = study_id
     self.model_id = model_id
     if masker is None:
         self.masker = MultiNiftiMasker()
     else:
         self.masker = masker
     self.hrf_model = hrf_model
     self.drift_model = drift_model
     self.glm_model = glm_model
     self.contrast_type = contrast_type
     self.output_z = output_z
     self.output_stat = output_stat
     self.output_effects = output_effects
     self.output_variance = output_variance
     self.merge_tasks = merge_tasks
     self.resample = resample
     self.target_affine = target_affine
     self.target_shape = target_shape
     self.memory = memory
     self.n_jobs = n_jobs
Example #24
def test_multi_pca():
    # Smoke test the MultiPCA
    # XXX: this is mostly a smoke test
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create fake activation to get a non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    multi_pca = MultiPCA(mask=mask_img, n_components=3)

    # Test that the components are the same if we put twice the same data
    components1 = multi_pca.fit(data).components_
    components2 = multi_pca.fit(2 * data).components_
    np.testing.assert_array_almost_equal(components1, components2)

    # Smoke test that multi_pca also works with single subject data
    multi_pca.fit(data[0])

    # Check that asking for too few components raises a ValueError
    multi_pca = MultiPCA()
    nose.tools.assert_raises(ValueError, multi_pca.fit, data[:2])

    # Smoke test the use of a masker and without CCA
    multi_pca = MultiPCA(mask=MultiNiftiMasker(mask_opening=0),
                         do_cca=False,
                         n_components=3)
    multi_pca.fit(data[:2])

    # Smoke test the transform and inverse_transform
    multi_pca.inverse_transform(multi_pca.transform(data[-2:]))
Example #25
y_shape = (10, 10)

sys.stderr.write(" Done (%.2fs).\n" % (time.time() - t0))

############################################################################
# Then we prepare and mask the data
# ----------------------------------
import numpy as np
from nilearn.input_data import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data
masker = MultiNiftiMasker(mask_img=miyawaki_dataset.mask,
                          detrend=True,
                          standardize=False)
masker.fit()
X_train = masker.transform(X_random_filenames)
X_test = masker.transform(X_figure_filenames)

# We load the visual stimuli from csv files
y_train = []
for y in y_random_filenames:
    y_train.append(
        np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                   (-1, ) + y_shape,
                   order='F'))

y_test = []
for y in y_figure_filenames:
    y_test.append(
        np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
                   (-1, ) + y_shape,
                   order='F'))
Example #26
def create_raw_rest_data(imgs_list,
                         root,
                         raw_dir,
                         masker_params=None,
                         n_jobs=1,
                         mock=False,
                         memory=Memory(cachedir=None),
                         overwrite=False):
    """

    Parameters
    ----------
    memory
    imgs_list: DataFrame with columns filename, confounds
    root
    raw_dir
    masker_params
    n_jobs
    mock

    Returns
    -------

    """
    if masker_params is None:
        masker_params = {}
    masker = MultiNiftiMasker(verbose=1,
                              memory=memory,
                              memory_level=1,
                              **masker_params)
    if masker.mask_img is None:
        masker.fit(imgs_list['filename'])
    else:
        masker.fit()

    if 'confounds' in imgs_list.columns:
        confounds = imgs_list['confounds']
        imgs_list = imgs_list.rename(columns={'confounds': 'orig_confounds'})
    else:
        confounds = repeat(None)

    if not os.path.exists(raw_dir):
        os.makedirs(raw_dir)
    filenames = Parallel(n_jobs=n_jobs)(delayed(_unmask_single_img)(
        masker, imgs, confounds, root, raw_dir, mock=mock, overwrite=overwrite)
                                        for imgs, confounds in zip(
                                            imgs_list['filename'], confounds))
    imgs_list = imgs_list.rename(columns={'filename': 'orig_filename'})
    imgs_list = imgs_list.assign(filename=filenames)
    imgs_list = imgs_list.assign(confounds=None)
    if not mock:
        imgs_list.to_csv(os.path.join(raw_dir, 'data.csv'), mode='w+')
        mask_img_file = os.path.join(raw_dir, 'mask_img.nii.gz')
        masker.mask_img_.to_filename(mask_img_file)
        params = masker.get_params()
        params.pop('memory')
        params.pop('memory_level')
        params.pop('n_jobs')
        params.pop('verbose')
        params['mask_img'] = mask_img_file
        json.dump(params, open(os.path.join(raw_dir, 'masker.json'), 'w+'))
Example #27
def compute_rec():
    mask_img = fetch_mask()
    masker = MultiNiftiMasker(mask_img=mask_img).fit()
    atlas = fetch_atlas_modl()
    components_imgs = [
        atlas.positive_new_components16, atlas.positive_new_components64,
        atlas.positive_new_components512
    ]
    components = masker.transform(components_imgs)
    proj, proj_inv, rec = make_projection_matrix(components, scale_bases=True)
    dump(rec, join(get_output_dir(), 'benchmark', 'rec.pkl'))


def load_rec():
    return load(join(get_output_dir(), 'benchmark', 'rec.pkl'))


# compute_rec()

exp_dirs = join(get_output_dir(), 'single_exp', '8')
models = []
rec = load_rec()
mask_img = fetch_mask()
masker = MultiNiftiMasker(mask_img=mask_img).fit()

for exp_dir in [exp_dirs]:
    estimator = load(join(exp_dirs, 'estimator.pkl'))
    transformer = load(join(exp_dirs, 'transformer.pkl'))
    print([(dataset, this_class)
           for dataset, lbin in transformer.lbins_.items()
           for this_class in lbin.classes_])
    coef = estimator.coef_
    coef_rec = coef.dot(rec)
    print(join(exp_dirs, 'maps.nii.gz'))
    imgs = masker.inverse_transform(coef_rec)
    imgs.to_filename(join(exp_dirs, 'maps.nii.gz'))
plot_stat_map(index_img(imgs, 10))
plt.show()
Example #28
import numpy as np
import matplotlib.pyplot as plt
from nilearn.input_data import MultiNiftiMasker
from nilearn.decomposition import CanICA
from nilearn.plotting import plot_img, plot_stat_map
from fetch_data import datasets

CACHE_DIR = '/home/mr234268/data'

dataset = datasets.fetch_adni_rs_fmri()
func_files = dataset['func']
dx_group = dataset['dx_group']

n_sample = 140
idx = np.random.randint(len(func_files), size=n_sample)
func_files_sample = np.array(func_files)[idx]

multi_masker = MultiNiftiMasker(mask_strategy='epi',
                                memory=CACHE_DIR,
                                n_jobs=1,
                                memory_level=2)
multi_masker.fit(func_files_sample)
plot_img(multi_masker.mask_img_)

n_components = 40
canica = CanICA(mask=multi_masker,
                n_components=n_components,
                smoothing_fwhm=6.,
                memory=CACHE_DIR,
                memory_level=5,
                threshold=3.,
                verbose=10,
                random_state=0)
canica.fit(func_files_sample)
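The CanICA call above runs on a real dataset pulled from disk; for a quick smoke run the estimator also accepts an explicit mask image, as in the test code earlier on this page (Example #5). A hedged, self-contained variant on synthetic data:

import nibabel
import numpy as np
from nilearn.decomposition import CanICA

rng = np.random.RandomState(0)
shape, affine = (6, 8, 10, 5), np.eye(4)
data = []
for _ in range(4):
    vol = rng.normal(size=shape)
    vol[2:4, 2:4, 2:4, :] += 10  # fake activation, as in the tests above
    data.append(nibabel.Nifti1Image(vol, affine))
mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)

canica = CanICA(n_components=3, mask=mask_img, random_state=0)
canica.fit(data)
component_maps = canica.masker_.inverse_transform(canica.components_)
print(component_maps.shape)  # (6, 8, 10, 3)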
Example #29
import os
from nilearn.input_data import MultiNiftiMasker # NiftiMasker
from nilearn.plotting import plot_roi, show
from nilearn.image.image import mean_img


# We first create a masker, giving it the options that we care
# about. Here we standardize the data, as this is often important
# for decoding.
mask_filename = os.getcwd() + "/dataset/train/Patient_01/GT.nii.gz"
scan_filename = os.getcwd() + "/dataset/train/Patient_01/Patient_01.nii.gz"

masker = MultiNiftiMasker(mask_img=mask_filename, standardize=True)
print(masker)


# We give the masker a filename and retrieve a 2D array ready
# for machine learning with scikit-learn (fit_transform fits the
# mask and applies it in one step)

scan_masked = masker.fit_transform(scan_filename)

# calculate mean image for the background
mean_func_img = mean_img(scan_filename)
'''
plot_roi(masker.mask_img_, mean_func_img, display_mode='y', cut_coords=4, title="Mask")
show()
'''
# maxes = np.max(labelArray, axis=0)
# calculate mean image for the background
Example #30
func_filenames = adhd_dataset.func
print("Functional nifti image: {0}...{1} ({2})".format(func_filenames[0],
                                                       func_filenames[1],
                                                       len(func_filenames)))

# Build an EPI-based mask because we have no anatomical data
if not os.path.isfile(MASKFILE):
    target_img = nibabel.load(func_filenames[0])
    mask = (target_img.get_fdata()[..., 0] != 0).astype(int)
    mask_img = nibabel.Nifti1Image(mask, target_img.affine)
    nibabel.save(mask_img, MASKFILE)
else:
    mask_img = nibabel.load(MASKFILE)

# Mask and preproc EPI data
masker = MultiNiftiMasker(mask_img=mask_img, standardize=True)
masker.fit()
if not os.path.isfile(DATAFILE):
    y = np.concatenate(masker.transform(func_filenames), axis=0)
    print(y.shape)
    np.save(DATAFILE, y)
else:
    y = np.load(DATAFILE)
iterator = masker.inverse_transform(y).get_fdata()
iterator = iterator.transpose((3, 0, 1, 2))
iterator = np.expand_dims(iterator, axis=1)
print(iterator.shape)

# Data iterator
manager = DataManager.from_numpy(train_inputs=iterator,
                                 batch_size=BATCH_SIZE,