Example #1
def display_maps(fig, components, index=0):
    components = check_niimg(components)
    ax = fig.add_subplot(2, 1, 1)
    plot_prob_atlas(components, view_type="filled_contours", axes=ax)
    ax = fig.add_subplot(2, 1, 2)
    plot_stat_map(index_img(components, index),
                  axes=ax,
                  colorbar=False,
                  threshold=0)
    return fig
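A possible usage sketch for the helper above (hedged: the imports, the component image path, and the figure size are assumptions, not part of the original snippet; check_niimg lives in nilearn's private utilities):

import matplotlib.pyplot as plt
from nilearn._utils import check_niimg
from nilearn.image import index_img
from nilearn.plotting import plot_prob_atlas, plot_stat_map

fig = plt.figure(figsize=(8, 8))
# "components.nii.gz" is a placeholder path to a 4D image of component maps.
fig = display_maps(fig, "components.nii.gz", index=0)
fig.savefig("components_overview.png")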
Example #2
def test_plot_prob_atlas(params):
    """Smoke tests for plot_prob_atlas.

    Tests different combinations of parameters `view_type`, `threshold`,
    and `colorbar`.
    """
    rng = np.random.RandomState(42)
    data_rng = rng.normal(size=(6, 8, 10, 5))
    plot_prob_atlas(Nifti1Image(data_rng, np.eye(4)), **params)
    plt.close()
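The `params` argument is injected by the test runner; a minimal sketch of how the test above might be parametrized with pytest (this grid is an assumption, not nilearn's actual test matrix):

import pytest

@pytest.mark.parametrize("params", [
    {"view_type": "contours"},
    {"view_type": "filled_contours", "threshold": 0.2},
    {"view_type": "continuous", "colorbar": True},
])
def test_plot_prob_atlas(params):
    ...  # body as in the snippet above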
Example #3
def save_prob_atlas(x, out_path):
    img = nl.image.new_img_like(mask,
                                x.numpy(),
                                affine=mask.affine,
                                copy_header=True)
    nlplt.plot_prob_atlas(img,
                          bg_img=bg_img,
                          view_type=view_type,
                          draw_cross=draw_cross,
                          threshold=threshold)
    plt.savefig(out_path)
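The function above closes over `mask`, `bg_img`, `view_type`, `draw_cross`, and `threshold`, and expects `x` to expose `.numpy()` (e.g. a torch tensor). A hedged sketch of the surrounding context these names imply (all paths are placeholders):

import matplotlib.pyplot as plt
import nibabel as nib
import nilearn as nl
import nilearn.plotting as nlplt
import torch

mask = nib.load("brain_mask.nii.gz")          # placeholder 3D reference image
bg_img = "mni152_template.nii.gz"             # placeholder background image
view_type, draw_cross, threshold = "filled_contours", False, None

x = torch.rand(*mask.shape, 10)               # hypothetical 4D component weights
save_prob_atlas(x, "prob_atlas.png")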
Example #4
def savefig_regions(regions_percentile_img, inputfile, outputfile):
    fig = plt.figure()
    title = ("ROIs using percentile thresholding. "
             "\n Each ROI in same color is an extracted region")
    plotting.plot_prob_atlas(regions_percentile_img,
                             bg_img=inputfile,
                             view_type='contours',
                             display_mode='z',
                             cut_coords=10,
                             title=title,
                             figure=fig)
    plt.savefig(outputfile + '_region_extraction.png')
Example #5
def run(idx, reduction, alpha, mask, raw, n_components, init, func_filenames):
    output_dir = join(trace_folder, 'experiment_%i' % idx)
    try:
        os.makedirs(output_dir)
    except OSError:
        pass
    dict_fact = SpcaFmri(mask=mask,
                         smoothing_fwhm=3,
                         batch_size=40,
                         shelve=not raw,
                         n_components=n_components,
                         replacement=False,
                         dict_init=fetch_atlas_smith_2009().rsn70 if
                         init else None,
                         reduction=reduction,
                         alpha=alpha,
                         random_state=0,
                         n_epochs=2,
                         l1_ratio=0.5,
                         backend='c',
                         memory=expanduser("~/nilearn_cache"), memory_level=2,
                         verbose=5,
                         n_jobs=1,
                         trace_folder=output_dir
                         )

    print('[Example] Learning maps')
    t0 = time.time()
    dict_fact.fit(func_filenames, raw=raw)
    t1 = time.time() - t0
    print('[Example] Dumping results')
    # The decomposition estimator embeds its own masker
    masker = dict_fact.masker_
    components_img = masker.inverse_transform(dict_fact.components_)
    components_img.to_filename(join(output_dir, 'components_final.nii.gz'))
    print('[Example] Run in %.2f s' % t1)
    # Show components from both methods using 4D plotting tools
    import matplotlib.pyplot as plt
    from nilearn.plotting import plot_prob_atlas, plot_stat_map, show
    from nilearn.image import index_img

    print('[Example] Displaying')
    fig, axes = plt.subplots(2, 1)
    plot_prob_atlas(components_img, view_type="filled_contours",
                    axes=axes[0])
    plot_stat_map(index_img(components_img, 0),
                  axes=axes[1],
                  colorbar=False,
                  threshold=0)
    plt.savefig(join(output_dir, 'components.pdf'))
    show()
Example #6
def run(idx, reduction, alpha, mask, raw, n_components, init, func_filenames):
    output_dir = join(trace_folder, 'experiment_%i' % idx)
    try:
        os.makedirs(output_dir)
    except OSError:
        pass
    dict_fact = SpcaFmri(
        mask=mask,
        smoothing_fwhm=3,
        batch_size=40,
        shelve=not raw,
        n_components=n_components,
        replacement=False,
        dict_init=fetch_atlas_smith_2009().rsn70 if init else None,
        reduction=reduction,
        alpha=alpha,
        random_state=0,
        n_epochs=2,
        l1_ratio=0.5,
        backend='c',
        memory=expanduser("~/nilearn_cache"),
        memory_level=2,
        verbose=5,
        n_jobs=1,
        trace_folder=output_dir)

    print('[Example] Learning maps')
    t0 = time.time()
    dict_fact.fit(func_filenames, raw=raw)
    t1 = time.time() - t0
    print('[Example] Dumping results')
    # The decomposition estimator embeds its own masker
    masker = dict_fact.masker_
    components_img = masker.inverse_transform(dict_fact.components_)
    components_img.to_filename(join(output_dir, 'components_final.nii.gz'))
    print('[Example] Run in %.2f s' % t1)
    # Show components from both methods using 4D plotting tools
    import matplotlib.pyplot as plt
    from nilearn.plotting import plot_prob_atlas, plot_stat_map, show
    from nilearn.image import index_img

    print('[Example] Displaying')
    fig, axes = plt.subplots(2, 1)
    plot_prob_atlas(components_img, view_type="filled_contours", axes=axes[0])
    plot_stat_map(index_img(components_img, 0),
                  axes=axes[1],
                  colorbar=False,
                  threshold=0)
    plt.savefig(join(output_dir, 'components.pdf'))
    show()
Example #7
    def plot_all(self, pred, N=40, save=False, item_file='group', name='vmf'):

        data = np.zeros((N, pred.shape[0]))
        total = 0
        for i in range(N):
            data[i][pred != i] = 0
            data[i][pred == i] = 1
            total += data[i][data[i] != 0].shape[0]

        print(total)

        if hasattr(self, "masker_"):
            self.components_img_ = self.masker_.inverse_transform(data)

        components_img = self.components_img_
        warnings.filterwarnings("ignore")
        display = plot_prob_atlas(components_img,
                                  title='All components',
                                  view_type='filled_contours')
        if save:
            re_path = '{}/brain/{}/{}'.format(RESULT_DIR, name, item_file)
            if not os.path.exists(re_path):
                os.makedirs(re_path)
            display.savefig('{}/all.png'.format(re_path), dpi=200)
        else:
            show()
Example #8
def plot_net(components):
    from nilearn.plotting import plot_prob_atlas
    from nilearn.image import iter_img
    from nilearn.plotting import plot_stat_map, show

    components_img = masker.inverse_transform(components)

    # Plot all ICA components together
    plot_prob_atlas(components_img, title="All ICA components")

    for i, cur_img in enumerate(iter_img(components_img)):
        plot_stat_map(cur_img,
                      display_mode="z",
                      title="IC %d" % i,
                      cut_coords=10,
                      colorbar=False)

    show()
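`masker` is a free variable in this snippet; in nilearn pipelines it is typically a fitted masker object. A sketch of what that setup could look like (the mask path is a placeholder; the import moved from nilearn.input_data to nilearn.maskers in recent releases):

from nilearn.maskers import NiftiMasker

masker = NiftiMasker(mask_img="brain_mask.nii.gz").fit()
# `components` is then expected to be an (n_components, n_voxels) array that
# masker.inverse_transform() can project back into a 4D image.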
Example #9
def plot_all_components(components_img, **kwargs):
    """ Plot the components IC spatial maps in only one brain. """
    from nilearn.plotting import plot_prob_atlas
    from matplotlib import pyplot as plt

    fig = plt.figure(facecolor='white')
    p = plot_prob_atlas(components_img, figure=fig, draw_cross=False, **kwargs)
    p.close()

    return fig
Example #10
def plot_all_components(components_img, **kwargs):
    """ Plot the components IC spatial maps in only one brain. """
    from nilearn.plotting import plot_prob_atlas
    from matplotlib import pyplot as plt

    fig = plt.figure(facecolor='white')
    p   = plot_prob_atlas(components_img, figure=fig, draw_cross=False, **kwargs)
    p.close()

    return fig
def pauli_atlas(views, coords):
    # Creates subcortical parcellation maps based on Pauli 2018
    # Define the datestamp as the current date
    date = pd.to_datetime('today').strftime("%d_%m_%Y_")
    # Load the atlas of choice; for other atlases see documentation section 8.2.11: Visualizing 4D probabilistic atlas maps
    subcortex = datasets.fetch_atlas_pauli_2017()
    atlas_types = {
        'Pauli2018 Subcortical Atlas': subcortex.maps,
    }
    # Create the image using the plot_prob_atlas function
    for name, atlas in sorted(atlas_types.items()):
        for v in views:
            date_stamp = os.path.join('/home/lauri/Documents/temp/' + date +
                                      views[v] + '_pauli2018.jpeg')
            cut_coords = coords
            plotting.plot_prob_atlas(atlas,
                                     cut_coords=cut_coords,
                                     view_type='continuous',
                                     display_mode=v,
                                     black_bg=True,
                                     colorbar=True)
            plt.savefig(date_stamp, dpi=600)
Example #12
    def plot_pro(self,
                 ita,
                 save=False,
                 item_file='group',
                 name='vmf',
                 choose=None,
                 cut_coords=None):

        for component in ita:
            if component.max() < -component.min():
                component *= -1
        if hasattr(self, "masker_"):
            self.components_img_ = self.masker_.inverse_transform(ita)

        components_img = self.components_img_
        warnings.filterwarnings("ignore")
        display = plot_prob_atlas(components_img,
                                  title='All components',
                                  view_type='filled_contours')
        if save:
            display.savefig('{}/brain/{}/{}/pro.png'.format(
                RESULT_DIR, name, item_file),
                            dpi=200)

        for i, cur_img in enumerate(iter_img(components_img)):
            if cut_coords is not None:
                display = plot_stat_map(cur_img,
                                        cut_coords=cut_coords[i],
                                        dim=-.5,
                                        threshold=1e-3,
                                        cmap=plt.get_cmap('autumn'))
            else:
                display = plot_stat_map(cur_img,
                                        dim=-.5,
                                        threshold=1e-3,
                                        cmap=plt.get_cmap('autumn'))
            if save:
                if choose is not None:
                    display.savefig('{}/brain/{}/{}/item{}.png'.format(
                        RESULT_DIR, name, item_file, choose[i] + 1),
                                    dpi=200)
                else:
                    display.savefig('{}/brain/{}/{}/item{}.png'.format(
                        RESULT_DIR, name, item_file, i + 1),
                                    dpi=200)
        if save is False:
            show()
Example #13
    def plot_all(self, pred, save=False, item_file='group', name='vmf', epoch=0):

        data = np.zeros((self.n_cluster, pred.shape[0]))
        total = 0
        for i in range(self.n_cluster):
            data[i][pred != i] = 0
            data[i][pred == i] = 1
            total += data[i][data[i] != 0].shape[0]

        print(total)

        if hasattr(self, "masker_"):
            self.components_img_ = self.masker_.inverse_transform(data)

        components_img = self.components_img_
        warnings.filterwarnings("ignore")
        display = plot_prob_atlas(components_img, title='All components', view_type='filled_contours')
        if save:
            path = '{}/brain/{}/{}/'.format(RESULT_DIR, name, item_file)
            os.makedirs(path, exist_ok=True)
            display.savefig(os.path.join(path, 'all_{}.png'.format(epoch)), dpi=200)
        else:
            show()
def yeo2015_atlas(views, coords):
    # Creates task-based parcellation maps based on Yeo 2015
    # Define the datestamp as the current date
    date = pd.to_datetime('today').strftime("%d_%m_%Y_")
    # Load the atlas of choice. The Yeo 2015 atlas is not included in nilearn's datasets module and must be loaded manually
    task_parc = image.load_img(
        '/usr/local/freesurfer/average/Yeo_Brainmap_MNI152/Yeo_12Comp_PrActGivenComp_FSL_MNI152_2mm.nii.gz'
    )
    # Create the image using the plot_prob_atlas function
    for v in views:
        date_stamp = os.path.join('/home/lauri/Documents/temp/' + date +
                                  views[v] + '_yeo2015.jpeg')
        cut_coords = coords
        choi_parc = plotting.plot_prob_atlas(task_parc,
                                             cut_coords=cut_coords,
                                             colorbar=True,
                                             vmin=1,
                                             vmax=12,
                                             cmap=plt.cm.get_cmap(
                                                 'tab20b', 12),
                                             view_type='filled_contours',
                                             display_mode=v,
                                             black_bg=True)
        plt.savefig(date_stamp, dpi=600)
# Canonical ICA decomposition of functional datasets by importing CanICA from
# decomposition module
from nilearn.decomposition import CanICA

# Initialize canica parameters
canica = CanICA(n_components=5, smoothing_fwhm=6.0, memory="nilearn_cache", memory_level=2, random_state=0)
# Fit to the data
canica.fit(func_filenames)
# ICA maps
components_img = canica.masker_.inverse_transform(canica.components_)

# Visualization
# Show ICA maps by using plotting utilities
from nilearn import plotting

plotting.plot_prob_atlas(components_img, view_type="filled_contours", title="ICA components")

################################################################################
# Extracting regions from the ICA maps, and then time-series signals from those
# regions, can both be done with RegionExtractor from the regions module.
# threshold=0.5 keeps a nominal amount of the nonzero voxels across all maps;
# the lower the threshold, the more (and less intense) voxels survive.
from nilearn.regions import RegionExtractor

extractor = RegionExtractor(
    components_img,
    threshold=0.5,
    thresholding_strategy="ratio_n_voxels",
    extractor="local_regions",
    standardize=True,
    min_region_size=1350,
Example #16
pl.figure()
pl.imshow(np.arange(1,
                    autom_data.max() + 1)[np.newaxis, :],
          cmap=pl.cm.gist_rainbow)
pl.xticks(range(5),
          ['Cluster 1', 'Cluster 2', 'Cluster 3', 'Cluster 4', 'Cluster 5'])

# LANL

# Clustering sara
nil.plot_prob_atlas(
    ni.load(os.path.join(path, "automatic_mask_lanl_nilearn.nii.gz")),
    anat_img=ni.load("phantoms/Coregistration/BREAKBEN/lanl/lanl_hf.nii.gz"),
    view_type='filled_contours',
    alpha=0.3,
    draw_cross=False,
    display_mode='z',
    threshold=2,
    annotate=False)

# Manual clustering
nil.plot_prob_atlas(
    ni.load(os.path.join(path, "manual_mask_lanl_nilearn.nii.gz")),
    anat_img=ni.load("phantoms/Coregistration/BREAKBEN/lanl/lanl_hf.nii.gz"),
    view_type='contours',
    draw_cross=False,
    display_mode='z',
    threshold=None,
    linewidths=1,
    annotate=False)
######################################################################
# Atlases
# =======

destrieux = datasets.fetch_atlas_destrieux_2009()
plotting.view_img(destrieux['maps'],
                  resampling_interpolation='nearest',
                  cmap='gist_ncar', symmetric_cmap=False, colorbar=False)
plotting.plot_roi(destrieux['maps'])


######################################################################
# Harvard-Oxford probabilistic (4D) atlas
harvard_oxford = datasets.fetch_atlas_harvard_oxford('cort-prob-2mm')
plotting.plot_prob_atlas(harvard_oxford['maps'])
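plot_prob_atlas supports several rendering modes; a quick comparison sketch reusing the same atlas (the three view types listed are the documented options besides 'auto'):

for view_type in ("contours", "filled_contours", "continuous"):
    plotting.plot_prob_atlas(harvard_oxford['maps'],
                             view_type=view_type,
                             title="Harvard-Oxford (%s)" % view_type)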


######################################################################
surf_destrieux = datasets.fetch_atlas_surf_destrieux()
fsaverage = datasets.fetch_surf_fsaverage()
plotting.view_surf(fsaverage['pial_left'], surf_destrieux['map_left'],
                   cmap='gist_ncar', colorbar=False)


######################################################################

# not needed with master
from nilearn import surface
fsaverage['sulc_left'] = surface.load_surf_data(fsaverage['sulc_left'])
Example #18
    def plot_pro(self,
                 ita,
                 save=False,
                 item_file='group',
                 name='vmf',
                 choose=None,
                 cut_coords=None,
                 display_mode='ortho',
                 belong='1'):

        re_path = '{}/brain/{}/{}'.format(RESULT_DIR, name, item_file)
        if not os.path.exists(re_path):
            os.makedirs(re_path)

        for component in ita:
            if component.max() < -component.min():
                component *= -1
        if hasattr(self, "masker_"):
            self.components_img_ = self.masker_.inverse_transform(ita)

        components_img = self.components_img_
        warnings.filterwarnings("ignore")
        display = plot_prob_atlas(components_img,
                                  title='All components',
                                  view_type='filled_contours')
        if save:
            display.savefig('{}/pro.png'.format(re_path), dpi=200)

        name = ['vmf-py', 'vmf-dp', 'gmm-dp']
        for i, cur_img in enumerate(iter_img(components_img)):
            if cut_coords is None:
                display = plot_stat_map(cur_img,
                                        dim=-.5,
                                        display_mode=display_mode,
                                        threshold=1e-2,
                                        cmap=plt.get_cmap('autumn'))
            else:
                display = plot_stat_map(cur_img,
                                        cut_coords=cut_coords,
                                        display_mode=display_mode,
                                        dim=-.5,
                                        threshold=1e-2,
                                        cmap=plt.get_cmap('autumn'))
            if save:
                if choose is not None and belong is not None:
                    display.savefig('{}/{}-{}-item{}.png'.format(
                        re_path, name[i], belong, choose[i]),
                                    dpi=200)
                elif choose is not None:
                    display.savefig('{}/item{}.png'.format(
                        re_path, choose[i] + 1),
                                    dpi=200)
                elif belong is not None:
                    display.savefig('{}/{}-item{}.png'.format(
                        re_path, belong, i + 1),
                                    dpi=200)
                else:
                    display.savefig('{}/item{}.png'.format(re_path, i + 1),
                                    dpi=200)
        if save is False:
            show()
Example #19
    # is not implemented. See Note section above for details.
    components_img = estimator.components_img_
    components_img.to_filename('%s_resting_state.nii.gz' % names[estimator])
    components_imgs.append(components_img)

###############################################################################
# Visualize the results
# ----------------------
from nilearn.plotting import (plot_prob_atlas, find_xyz_cut_coords, show,
                              plot_stat_map)
from nilearn.image import index_img

# Selecting specific maps to display: maps were manually chosen to be similar
indices = {dict_learning: 25, canica: 33}
# We select relevant cut coordinates for displaying
cut_component = index_img(components_imgs[0], indices[dict_learning])
cut_coords = find_xyz_cut_coords(cut_component)
for estimator, components in zip(estimators, components_imgs):
    # 4D plotting
    plot_prob_atlas(components,
                    view_type="filled_contours",
                    title="%s" % names[estimator],
                    cut_coords=cut_coords,
                    colorbar=False)
    # 3D plotting
    plot_stat_map(index_img(components, indices[estimator]),
                  title="%s" % names[estimator],
                  cut_coords=cut_coords,
                  colorbar=False)
show()
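The loop above relies on `estimators`, `names`, `dict_learning`, and `canica` defined earlier in the full example; a sketch of what that setup typically looks like (the parameters below are assumptions):

from nilearn.decomposition import CanICA, DictLearning

canica = CanICA(n_components=40, random_state=0)
dict_learning = DictLearning(n_components=40, random_state=0)
estimators = [dict_learning, canica]
names = {dict_learning: 'DictionaryLearning', canica: 'CanICA'}
components_imgs = []   # filled by the fitting loop at the top of the snippet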
Example #20
def plot_contrasts(df,
                   task_contrast,
                   masker,
                   write_dir,
                   cut=0,
                   display_mode='x',
                   name=''):
    """
    Parameters
    ----------
    df: pandas dataframe,
        holding information on the database indexed by task, contrast, subject
    task_contrasts: list of tuples,
               Pairs of (task, contrasts) to be displayed
    masker: nilearn.NiftiMasker instance,
            used to mask out images
    write_dir: string,
               where to write the result
    """
    from nilearn.plotting import cm
    fig = plt.figure(figsize=(16, 4), facecolor='k')
    plt.axis('off')
    n_maps = len(task_contrast)
    cmap = plt.get_cmap(plt.cm.gist_rainbow)
    color_list = cmap(np.linspace(0, 1, n_maps + 1))
    break_length = 165. / n_maps
    grid = 5 * np.ones((10, 10))
    grid[0] = 1
    for i in range(n_maps):
        delta = 1. / n_maps
        pos = [delta * i, 0.01, delta, .1]
        ax = fig.add_axes(pos, facecolor='k')
        ax.axis('off')
        inset = fig.add_axes([delta * i, 0.01, .01, .05])
        inset.imshow(grid, cmap=cm.alpha_cmap(color_list[i]))
        inset.axis('off')
        x_text = .08
        y_text = .95
        ax.text(x_text,
                y_text,
                break_string(BETTER_NAMES[task_contrast[i][1]], break_length),
                va='top',
                ha='left',
                fontsize=11,
                color='w',
                transform=ax.transAxes)

    for i, subject in enumerate(SUBJECTS):
        # anat = df[df.contrast == 't1_bet'][df.subject == subject].path.values[-1]
        anat = df[df.contrast == 'highres_gm'][df.subject ==
                                               subject].path.values[-1]
        print(anat)
        axes = plt.axes(
            [.01 + .167 * np.mod(i, 6), .12 + .44 * (i // 6), .165, .44])
        th_imgs = []
        for task, contrast in task_contrast:
            imgs = df[df.task == task][df.contrast == contrast]\
                   [df.subject == subject][df.acquisition == 'ffx'].path.values
            if len(imgs) > 0:
                img = imgs[-1]
                threshold = np.percentile(masker.transform(img), 99)
                th_img, _ = map_threshold(img,
                                          threshold=threshold,
                                          height_control='height',
                                          cluster_threshold=5)
                th_imgs.append(th_img)
        plotting.plot_prob_atlas(
            th_imgs,
            bg_img=anat,
            axes=axes,
            display_mode=display_mode,
            cut_coords=[cut],
            black_bg=True,
            annotate=False,
            dim=0,  # title=subject,
            colorbar=False,
            view_type='filled_contours',
            linewidths=2.)
        axes.axis('off')
    fig.savefig(os.path.join(write_dir, 'snapshot_%s.pdf' % name),
                facecolor='k',
                dpi=300)
Example #21
harvard_oxford_sub = datasets.fetch_atlas_harvard_oxford('sub-prob-2mm')

# Multi Subject Dictionary Learning Atlas
msdl = datasets.fetch_atlas_msdl()

# Smith ICA Atlas and Brain Maps 2009
smith = datasets.fetch_atlas_smith_2009()

# ICBM tissue probability
icbm = datasets.fetch_icbm152_2009()

# Visualization
import matplotlib.pyplot as plt
from nilearn import plotting

atlas_types = {'Harvard_Oxford': harvard_oxford.maps,
               'Harvard_Oxford sub': harvard_oxford_sub.maps,
               'MSDL': msdl.maps, 'Smith 2009 10 RSNs': smith.rsn10,
               'Smith2009 20 RSNs': smith.rsn20,
               'Smith2009 70 RSNs': smith.rsn70,
               'Smith2009 10 Brainmap': smith.bm10,
               'Smith2009 20 Brainmap': smith.bm20,
               'Smith2009 70 Brainmap': smith.bm70,
               'ICBM tissues': (icbm['wm'], icbm['gm'], icbm['csf'])}

for name, atlas in sorted(atlas_types.items()):
    plotting.plot_prob_atlas(atlas, title=name)

plt.show()
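A hedged variant of the loop above: each atlas figure can be written straight to disk through output_file instead of being shown interactively (the file names below are placeholders):

for name, atlas in sorted(atlas_types.items()):
    plotting.plot_prob_atlas(atlas, title=name,
                             output_file=name.replace(' ', '_') + '.png')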
Example #22
plotting.plot_stat_map(threshold_value_img,
                       draw_cross=False,
                       cut_coords=[30, -72, -6],
                       title='threshold image with intensity '
                             'value',
                       colorbar=False)
from nilearn.regions import connected_regions
# regions_percentile_img,index = connected_regions(threshold_percentile_img,
#                                                 min_region_size=1500)
regions_value_img, index = connected_regions(threshold_value_img,
                                             min_region_size=1400)
print(regions_value_img.shape)
# regions_value_img.to_filename("D:/FaceData/roi.nii.gz")
# title = ("ROIs using percentile threshold."
#        "\n Each ROI in same color is an extracted region")
# plotting.plot_prob_atlas(regions_percentile_img,anat_img=t_map,
#                         view_type='contours',display_mode='x',
#                         cut_coords=5)
# title = ("ROIs using image intensity threshold."
#        "\n Each ROI in same color is an extracted region")
plotting.plot_prob_atlas(regions_value_img,
                         bg_img=t_map,
                         view_type='contours',
                         cut_coords=[30, -72, -6])
plotting.plot_roi(regions_value_img,
                  anat_img,
                  title='RFFA_ROI',
                  cut_coords=[30, -72, -6],
                  draw_cross=False)
plotting.show()
Example #23
# parietal nodes
display.add_overlay(image.index_img(atlas_filename, 5),
                    cmap=plotting.cm.black_blue)
display.add_overlay(image.index_img(atlas_filename, 6),
                    cmap=plotting.cm.black_green)
display.add_overlay(image.index_img(atlas_filename, 3),
                    cmap=plotting.cm.black_pink)

plotting.show()


###############################################################################
# Visualizing a probabilistic atlas with plot_prob_atlas
# =====================================================
#
# Alternatively, we can create a new 4D image by selecting the 3rd, 4th, 5th and 6th (zero-based) probabilistic maps from the atlas
# via :func:`nilearn.image.index_img` and use :func:`nilearn.plotting.plot_prob_atlas` (added in version 0.2)
# to plot the selected nodes in one step.
#
# Unlike :func:`nilearn.plotting.plot_stat_map` this works with 4D images

dmn_nodes = image.index_img(atlas_filename, [3, 4, 5, 6])
# Note that dmn_nodes is now a 4D image
print(dmn_nodes.shape)
####################################

display = plotting.plot_prob_atlas(dmn_nodes,
                                   cut_coords=(0, -55, 29),
                                   title="DMN nodes in MSDL atlas")
plotting.show()
canica = CanICA(n_components=20, smoothing_fwhm=6.,
                memory="nilearn_cache", memory_level=5,
                threshold=3., verbose=10, random_state=0)
canica.fit(func_filenames)

# Retrieve the independent components in brain space
components_img = canica.masker_.inverse_transform(canica.components_)
# components_img is a Nifti Image object, and can be saved to a file with
# the following line:
components_img.to_filename('canica_resting_state.nii.gz')


####################################################################
# To visualize we plot the outline of all components on one figure
from nilearn.plotting import plot_prob_atlas

# Plot all ICA components together
plot_prob_atlas(components_img, title='All ICA components')


####################################################################
# Finally, we plot the map for each ICA component separately
from nilearn.image import iter_img
from nilearn.plotting import plot_stat_map, show

for i, cur_img in enumerate(iter_img(components_img)):
    plot_stat_map(cur_img, display_mode="z", title="IC %d" % i,
                  cut_coords=1, colorbar=False)

show()
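`func_filenames` is assumed to come from an earlier data-fetching step; a minimal sketch using nilearn's ADHD sample dataset (any list of 4D functional images would work):

from nilearn import datasets

adhd_dataset = datasets.fetch_adhd(n_subjects=1)
func_filenames = adhd_dataset.func   # list of paths to 4D functional images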
Example #25
# Allen RSN networks
allen = datasets.fetch_atlas_allen_2011()

# Pauli subcortical atlas
subcortex = datasets.fetch_atlas_pauli_2017()

# Visualization
from nilearn import plotting

atlas_types = {'Harvard_Oxford': harvard_oxford.maps,
               'Harvard_Oxford sub': harvard_oxford_sub.maps,
               'MSDL': msdl.maps, 'Smith 2009 10 RSNs': smith.rsn10,
               'Smith2009 20 RSNs': smith.rsn20,
               'Smith2009 70 RSNs': smith.rsn70,
               'Smith2009 20 Brainmap': smith.bm20,
               'Smith2009 70 Brainmap': smith.bm70,
               'ICBM tissues': (icbm['wm'], icbm['gm'], icbm['csf']),
               'Allen2011': allen.rsn28,
               'Pauli2017 Subcortical Atlas': subcortex.maps,
               }

for name, atlas in sorted(atlas_types.items()):
    plotting.plot_prob_atlas(atlas, title=name)

# An optional colorbar can be set
plotting.plot_prob_atlas(smith.bm10, title='Smith2009 10 Brainmap (with'
                                           ' colorbar)',
                         colorbar=True)
print('ready')
plotting.show()
# Initialize DictLearning object
dict_learn = DictLearning(n_components=8, smoothing_fwhm=6.,
                          memory="nilearn_cache", memory_level=2,
                          random_state=0)
# Fit to the data
dict_learn.fit(func_filenames)
# Resting state networks/maps in attribute `components_img_`
# Note that this attribute is implemented from version 0.4.1.
# For older versions, see the note section above for details.
components_img = dict_learn.components_img_

# Visualization of functional networks
# Show networks using plotting utilities
from nilearn import plotting

plotting.plot_prob_atlas(components_img, view_type='filled_contours',
                         title='Dictionary Learning maps')

################################################################################
# Extract regions from networks
# ------------------------------

# Import Region Extractor algorithm from regions module
# threshold=0.5 keeps a nominal amount of the nonzero voxels across all maps;
# the lower the threshold, the more (and less intense) voxels survive.
from nilearn.regions import RegionExtractor

extractor = RegionExtractor(components_img, threshold=0.5,
                            thresholding_strategy='ratio_n_voxels',
                            extractor='local_regions',
                            standardize=True, min_region_size=1350)
# Just call fit() to process for regions extraction
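The snippet ends with the extractor constructed but not yet fitted; a sketch of the usual next steps, consistent with the other region-extraction snippets in this collection:

extractor.fit()
regions_extracted_img = extractor.regions_img_   # 4D image, one region per map
print("Number of regions extracted:", regions_extracted_img.shape[-1])
plotting.plot_prob_atlas(regions_extracted_img, view_type='filled_contours',
                         title='Regions extracted from the networks')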
Example #27
def run_canica_subject(
        sub_id,
        cond_id='D1_D5',
        ds_dir='/data/BnB_USER/oliver/somato',
        out_basedir='/home/homeGlobal/oli/somato/scratch/ica/CanICA',
        ncomps=50,
        smoothing=3,
        caa=True,
        standard=True,
        detr=True,
        highpass=.01953125,
        tr=2.,
        masktype='epi',
        ninit=10,
        seed=42,
        verb=10):
    """
    Run Nilearn's CanICA on a single condition of a single subject.
    """
    # load example image
    bold_file = pjoin(ds_dir, sub_id, cond_id, 'data.nii.gz')
    bold_img = load_img(bold_file)
    # paths to output
    out_dir = pjoin(out_basedir, sub_id, cond_id)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_comp_nii = pjoin(out_dir, 'components.nii.gz')
    out_components_arr = pjoin(out_dir, 'components.npy')
    out_png = pjoin(out_dir, 'components_probatlas.png')

    # set up ica
    ica = CanICA(n_components=ncomps,
                 smoothing_fwhm=smoothing,
                 do_cca=caa,
                 standardize=standard,
                 detrend=detr,
                 mask_strategy=masktype,
                 high_pass=highpass,
                 t_r=tr,
                 n_init=ninit,
                 random_state=seed,
                 verbose=verb)
    # more interesting arguments
    # mask_strategy='mni_something, mask_args=see nilearn.masking.compute_epi_mask, threshold=3.

    # fit ica
    ica.fit(bold_img)

    # save components as 4d nifti
    components_img = ica.components_img_
    components_img.to_filename(out_comp_nii)
    # plot components as prob atlas and save plot
    g = plot_prob_atlas(components_img, bg_img=mean_img(bold_img))
    g.savefig(out_png, dpi=300)
    # save components as 2d np array
    components_arr = ica.components_
    np.save(out_components_arr, components_arr)
    # save automatically generated epi mask
    if masktype == 'epi':
        mask_img = ica.mask_img_
        out_mask_img = pjoin(out_dir, 'mask_img.nii.gz')
        mask_img.to_filename(out_mask_img)

    return ica  # return ica object for later use
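A hypothetical invocation of the function above (the subject and condition identifiers, like the dataset paths baked into the defaults, are placeholders from the original project):

ica = run_canica_subject('sub001', cond_id='D1_D5', ncomps=20, smoothing=3.)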
Example #28
################################################################################
# Extract the regions using the connected_regions function
from nilearn.regions import connected_regions

regions_percentile_img, index = connected_regions(threshold_percentile_img,
                                                  min_region_size=1500)

regions_value_img, index = connected_regions(threshold_value_img,
                                             min_region_size=1500)

################################################################################
# Visualizing region extraction results
title = ("ROIs using percentile thresholding. "
         "\n Each ROI in same color is an extracted region")
plotting.plot_prob_atlas(regions_percentile_img,
                         bg_img=tmap_filename,
                         view_type='contours',
                         display_mode='z',
                         cut_coords=5,
                         title=title)
title = ("ROIs using image intensity thresholding. "
         "\n Each ROI in same color is an extracted region")
plotting.plot_prob_atlas(regions_value_img,
                         bg_img=tmap_filename,
                         view_type='contours',
                         display_mode='z',
                         cut_coords=5,
                         title=title)
plotting.show()
dict_learn = DictLearning(n_components=5,
                          smoothing_fwhm=6.,
                          memory="nilearn_cache",
                          memory_level=2,
                          random_state=0)
# Fit to the data
dict_learn.fit(func_filenames)
# Resting state networks/maps
components_img = dict_learn.masker_.inverse_transform(dict_learn.components_)

# Visualization of resting state networks
# Show networks using plotting utilities
from nilearn import plotting

plotting.plot_prob_atlas(components_img,
                         view_type='filled_contours',
                         title='Dictionary Learning maps')

################################################################################
# Extracting regions from networks

# Import Region Extractor algorithm from regions module
# threshold=0.5 keeps a nominal amount of the nonzero voxels across all maps;
# the lower the threshold, the more (and less intense) voxels survive.
from nilearn.regions import RegionExtractor

extractor = RegionExtractor(components_img,
                            threshold=0.5,
                            thresholding_strategy='ratio_n_voxels',
                            extractor='local_regions',
                            standardize=True,
Example #30
# Now add as an overlay the maps for the ACC and the left and right
# parietal nodes
display.add_overlay(image.index_img(atlas_filename, 5),
                    cmap=plotting.cm.black_blue)
display.add_overlay(image.index_img(atlas_filename, 6),
                    cmap=plotting.cm.black_green)
display.add_overlay(image.index_img(atlas_filename, 3),
                    cmap=plotting.cm.black_pink)

plotting.show()

###############################################################################
# Visualizing a probabilistic atlas with plot_prob_atlas
# =====================================================
#
# Alternatively, we can create a new 4D image by selecting the 3rd, 4th, 5th and 6th (zero-based) probabilistic maps from the atlas
# via :func:`nilearn.image.index_img` and use :func:`nilearn.plotting.plot_prob_atlas` (added in version 0.2)
# to plot the selected nodes in one step.
#
# Unlike :func:`nilearn.plotting.plot_stat_map` this works with 4D images

dmn_nodes = image.index_img(atlas_filename, [3, 4, 5, 6])
# Note that dmn_nodes is now a 4D image
print(dmn_nodes.shape)
####################################

display = plotting.plot_prob_atlas(dmn_nodes,
                                   cut_coords=(0, -55, 29),
                                   title="DMN nodes in MSDL atlas")
plotting.show()
plotting.plot_stat_map(threshold_percentile_img, display_mode='z', cut_coords=5,
                       title='Threshold image with string percentile', colorbar=False)

# Showing intensity threshold image
plotting.plot_stat_map(threshold_value_img, display_mode='z', cut_coords=5,
                       title='Threshold image with intensity value', colorbar=False)

################################################################################
# Extract the regions using the connected_regions function
from nilearn.regions import connected_regions

regions_percentile_img, index = connected_regions(threshold_percentile_img,
                                                  min_region_size=1500)

regions_value_img, index = connected_regions(threshold_value_img,
                                             min_region_size=1500)

################################################################################
# Visualizing region extraction results
title = ("ROIs using percentile thresholding. "
         "\n Each ROI in same color is an extracted region")
plotting.plot_prob_atlas(regions_percentile_img, bg_img=tmap_filename,
                         view_type='contours', display_mode='z',
                         cut_coords=5, title=title)
title = ("ROIs using image intensity thresholding. "
         "\n Each ROI in same color is an extracted region")
plotting.plot_prob_atlas(regions_value_img, bg_img=tmap_filename,
                         view_type='contours', display_mode='z',
                         cut_coords=5, title=title)
plotting.show()
Example #32
    # stats_report_filename = os.path.join(subject_output_dir, "reports",
    #                                      "report_stats.html")
    # generate_subject_stats_report(
    #     stats_report_filename, contrasts, z_maps, fmri_glm.mask, anat=anat,
    #     threshold=2.3, cluster_th=15, design_matrices=design_matrices, TR=tr,
    #     subject_id="sub001", n_scans=n_scans, hfcut=hfcut,
    #     paradigm=paradigm, frametimes=frametimes,
    #     drift_model=drift_model, hrf_model=hrf_model)
    # ProgressReport().finish_dir(subject_output_dir)

    return dict(subject_id=subject_id, mask=mask_path,
                effects_maps=effects_maps, z_maps=z_maps, contrasts=contrasts)


# first level GLM
mem = Memory(os.path.join(output_dir, "cache_dir"))
n_jobs = min(n_jobs, len(subject_ids))
first_levels = Parallel(n_jobs=n_jobs)(delayed(mem.cache(do_subject_glm))(
    subject_id) for subject_id in subject_ids)

# run second-level GLM
group_zmaps = group_one_sample_t_test(
    [subject_data["mask"] for subject_data in first_levels],
    [subject_data["effects_maps"] for subject_data in first_levels],
    first_levels[0]["contrasts"],
    output_dir, threshold=2.)
plot_prob_atlas([zmap for zmap in group_zmaps.values() if "_minus_" in zmap],
                threshold=1.2, view_type="filled_contours")
plt.savefig("group_zmaps.png")
show()
Example #33
    #     paradigm=paradigm, frametimes=frametimes,
    #     drift_model=drift_model, hrf_model=hrf_model)
    # ProgressReport().finish_dir(subject_output_dir)

    return dict(subject_id=subject_id,
                mask=mask_path,
                effects_maps=effects_maps,
                z_maps=z_maps,
                contrasts=contrasts)


# first level GLM
mem = Memory(os.path.join(output_dir, "cache_dir"))
n_jobs = min(n_jobs, len(subject_ids))
first_levels = Parallel(n_jobs=n_jobs)(
    delayed(mem.cache(do_subject_glm))(subject_id)
    for subject_id in subject_ids)

# run second-level GLM
group_zmaps = group_one_sample_t_test(
    [subject_data["mask"] for subject_data in first_levels],
    [subject_data["effects_maps"] for subject_data in first_levels],
    first_levels[0]["contrasts"],
    output_dir,
    threshold=2.)
plot_prob_atlas([zmap for zmap in group_zmaps.values() if "_minus_" in zmap],
                threshold=1.2,
                view_type="filled_contours")
plt.savefig("group_zmaps.png")
show()
Example #34

# In[ ]:


# the following line: saves components in file
components.to_filename('canica_resting_state.nii.gz')


# In[ ]:


from nilearn.plotting import plot_prob_atlas

# Plot all ICA components together
plot_prob_atlas(components, title='All ICA components')


# In[ ]:


from nilearn.image import iter_img
from nilearn.plotting import plot_stat_map, show

for i, cur_img in enumerate(iter_img(components)):
    plot_stat_map(cur_img, display_mode="z", title="IC %d" % i,
                  cut_coords=1, colorbar=False)


# In[ ]:
    for i in range(n_subjects):
        plotting.plot_stat_map(masker.inverse_transform(components[i, :, 0]),
                               colorbar=False,
                               output_file=os.path.join(
                                   write_dir, '_component_%02d.png' % i),
                               display_mode='x',
                               cut_coords=5,
                               threshold=.00)

    for i, subject in enumerate(subject_list):
        bg_img = db[db.contrast == 't1'][db.subject == subject].path.values[-1]
        plotting.plot_prob_atlas(
            masker.inverse_transform(components[i, :, :].T),
            view_type='filled_contours',
            output_file=os.path.join(write_dir, 'dictionary_%s.png' % subject),
            linewidths=.5,
            dim=0,
            display_mode='x',
            bg_img=bg_img,
            cut_coords=[-50],
            black_bg=True)

    for idx in range(n_components):
        plot_dictionary(components, idx, write_dir)

    for i in range(n_components):
        cmp_ = masker.inverse_transform(mean_components[:, i])
        nib.save(cmp_, os.path.join(write_dir,
                                    'mean_component_%02d.nii.gz' % i))
"""
for i in [0, 1, 8]:
    for j in range(n_subjects):
Example #36
# Multi Subject Dictionary Learning Atlas
msdl = datasets.fetch_atlas_msdl()

# Smith ICA Atlas and Brain Maps 2009
smith = datasets.fetch_atlas_smith_2009()

# ICBM tissue probability
icbm = datasets.fetch_icbm152_2009()

# Visualization
import matplotlib.pyplot as plt
from nilearn import plotting

atlas_types = {
    'Harvard_Oxford': harvard_oxford.maps,
    'Harvard_Oxford sub': harvard_oxford_sub.maps,
    'MSDL': msdl.maps,
    'Smith 2009 10 RSNs': smith.rsn10,
    'Smith2009 20 RSNs': smith.rsn20,
    'Smith2009 70 RSNs': smith.rsn70,
    'Smith2009 10 Brainmap': smith.bm10,
    'Smith2009 20 Brainmap': smith.bm20,
    'Smith2009 70 Brainmap': smith.bm70,
    'ICBM tissues': (icbm['wm'], icbm['gm'], icbm['csf'])
}

for name, atlas in sorted(atlas_types.items()):
    plotting.plot_prob_atlas(atlas, title=name)

plt.show()
# Accuracy Series: 84.6% || series+ 8roi: 84.63%% || Corr: 68.16%
#atlas_harvard_oxford.maps= ICA_folder+ '\\ICASSO25_schizo_23_comp.nii'

# Accuracy Series: 89.4% || series+ 8roi: 90.1% || Corr: 66.15%  || 84 Features
#atlas_harvard_oxford.maps= ICA_folder+ '\\ICASSO30_schizo_30_comp.nii'

# Accuracy Series: 88.13% || series+ 8roi: 88.9.1% || Corr: 70.5%
#atlas_harvard_oxford.maps= ICA_folder+ '\\ICASSO35_schizo_35_comp.nii'

# Accuracy Series:Thershold=0.1:  62.21% || series+ 8roi:  || Corr: % || 16 Features
# Group SVM: 68.2%
# Accuracy of each 16 egion extractor=[50,52,51.3,52.4,49.7,50.6,50.3,49.7,51,50.4,48.9,48.8,51.9,51.1]
# Accuracy using EEG type band extraction and then feature ext or directly feature ext is below 50%
#atlas_harvard_oxford.maps= ICA_folder+ '\\ICASSO25_11_17.nii'

plotting.plot_prob_atlas(atlas_harvard_oxford.maps)

###################################################################################
###################################################################################
##################### Dictionary Learning ########################################
'''
# Import dictionary learning algorithm from decomposition module and call the
# object and fit the model to the functional datasets
from nilearn.decomposition import DictLearning

# Initialize DictLearning object
dict_learn = DictLearning(n_components=25, smoothing_fwhm=None,
                          memory=r'D:\ROI Schizo\ICA\Dict_learning\Nilearn_cache', 
                          memory_level=5, random_state=0, n_jobs=6)
# Fit to the data
dict_learn.fit(func)

i = 0
nrows = 6
ncols = 3
fig, axes = plt.subplots(nrows=nrows, ncols=ncols, figsize=(10, 10),
                         squeeze=True)
cmaps = ['gist_earth', 'terrain', 'ocean', 'gist_stern',
         'brg', 'CMRmap', 'cubehelix', 'gnuplot', 'gnuplot2',
         'gist_ncar', 'nipy_spectral', 'jet', 'rainbow',
         'gist_rainbow', 'hsv', 'flag', 'prism']
for ii in range(nrows):
    for jj in range(ncols):
        if i < 17:
            cur_img = index_img(list_of_imgs[i], 0)
            cut_coords = plotting.find_xyz_cut_coords(cur_img)
            plotting.plot_prob_atlas(list_of_imgs[i],
                                     cmap=cmaps[i],
                                     view_type='filled_contours',
                                     threshold=0.1, alpha=0.8,
                                     axes=axes[ii, jj], figure=fig,
                                     draw_cross=False,
                                     cut_coords=cut_coords)
            plt.title(list_of_titles[i], fontsize=10,
                      horizontalalignment='center')
            i += 1
        else:
            axes[5, 2].remove()
            plt.savefig('maps.pdf')
            plt.close()
Example #39
# Apply our decomposition estimator with reduction
n_components = 20

dict_fact = SpcaFmri(n_components=n_components, smoothing_fwhm=6.,
                     memory="nilearn_cache", memory_level=2,
                     reduction=3,
                     verbose=4,
                     alpha=0.001,
                     random_state=0,
                     n_epochs=1,
                     n_jobs=1,
                     )

print('[Example] Learning maps')
t0 = time.time()
dict_fact.fit(func_filenames)
print('[Example] Dumping results')
# The decomposition estimator embeds its own masker
masker = dict_fact.masker_
components_img = masker.inverse_transform(dict_fact.components_)
components_img.to_filename('components.nii.gz')
t1 = time.time() - t0
print('[Example] Run in %.2f s' % t1)
# Show components from both methods using 4D plotting tools
from nilearn.plotting import plot_prob_atlas, show

print('[Example] Displaying')

plot_prob_atlas(components_img, view_type="filled_contours",
                title="Reduced sparse PCA",colorbar=False)
show()
                             random_state=0,
                             n_epochs=1,
                             mask_strategy='template')

print('[Example] Fitting dictionary learning model')
dict_learning.fit(func_filenames)
print('[Example] Saving results')
# Grab the extracted components, unmasked back into a Nifti image.
# Note: for versions older than 0.4.1, components_img_ is not implemented.
# See the note section above for details.
dictlearning_components_img = dict_learning.components_img_
dictlearning_components_img.to_filename('dict_learning.nii.gz')

from nilearn.plotting import plot_prob_atlas
plot_prob_atlas(dictlearning_components_img,
                title='All DictLearning components',
                output_file="dict_learning_atlas.png")

from nilearn.image import iter_img
from nilearn.plotting import plot_stat_map, show

j = 0
import os
os.system("mkdir dict_learning_images")

for i, cur_img in enumerate(iter_img(dictlearning_components_img)):
    ofile = "dict_learning_images/img_" + str(j) + ".png"
    j = j + 1
    plot_stat_map(cur_img,
                  display_mode="z",
                  title="Comp %d" % i,
Example #41
                     backend='c',
                     # trace_folder=trace_folder,
                     n_jobs=n_jobs,
                     )

print('[Example] Learning maps')
t0 = time.time()
dict_fact.fit(func_filenames)
print('[Example] Dumping results')
# The decomposition estimator embeds its own masker
masker = dict_fact.masker_
components_img = masker.inverse_transform(dict_fact.components_)
components_img.to_filename(join(trace_folder, 'components.nii.gz'))
t1 = time.time() - t0
print('[Example] Run in %.2f s' % t1)
# Show components from both methods using 4D plotting tools
import matplotlib.pyplot as plt
from nilearn.plotting import plot_prob_atlas, plot_stat_map, show
from nilearn.image import index_img

print('[Example] Displaying')
fig, axes = plt.subplots(2, 1)
plot_prob_atlas(components_img, view_type="filled_contours",
                axes=axes[0])
plot_stat_map(index_img(components_img, 0),
              axes=axes[1],
              colorbar=False,
              threshold=0)
plt.savefig(join(trace_folder, 'components.pdf'))
show()
Example #42
    canica_dir = data_dir + '/' + split + '/' + 'canica'
    create_dir(canica_dir)
    filename = canica_dir + '/' + fwhm + '_resting_state_all.nii.gz'
    # save components image
    components_img.to_filename(op.join(data_dir, filename))
    # update configuration file
    experiment["files_path"]["brain_atlas"]["components_img"] = filename
    local_config["experiment"] = experiment

    # -----------------------------------------------------------------
    from nilearn.plotting import plot_prob_atlas

    # Plot all ICA components together
    plot_prob_atlas(components_img,
                    title='CanICA based Brain Atlas',
                    view_type="filled_contours",
                    output_file=op.join(
                        canica_dir,
                        fwhm + '_resting_state_all_plot_prob_atlas'))
# In[]
# ------------------------------------------------------------------
if (args.dictlearn):
    # Extract resting-state networks with DictionaryLearning

    # Import dictionary learning algorithm from decomposition module and call the
    # object and fit the model to the functional datasets
    from nilearn.decomposition import DictLearning
    logger.info("Dict Learning algorithm starting...")

    # Initialize DictLearning object
    dict_learn = DictLearning(n_components=args.n_components,
                              verbose=args.verbose,
Example #43
    smoothing_fwhm=6.,
    memory="nilearn_cache",
    memory_level=2,
    reduction=3,
    verbose=4,
    alpha=0.001,
    random_state=0,
    n_epochs=1,
    n_jobs=1,
)

print('[Example] Learning maps')
t0 = time.time()
dict_fact.fit(func_filenames)
print('[Example] Dumping results')
# The decomposition estimator embeds its own masker
masker = dict_fact.masker_
components_img = masker.inverse_transform(dict_fact.components_)
components_img.to_filename('components.nii.gz')
t1 = time.time() - t0
print('[Example] Run in %.2f s' % t1)
# Show components from both methods using 4D plotting tools
from nilearn.plotting import plot_prob_atlas, show

print('[Example] Displaying')

plot_prob_atlas(components_img,
                view_type="filled_contours",
                title="Reduced sparse PCA",
                colorbar=False)
show()
extraction = RegionExtractor(atlas_networks, min_region_size=800,
                             threshold=98, thresholding_strategy='percentile')

# Just call fit() to execute region extraction procedure
extraction.fit()
regions_img = extraction.regions_img_

################################################################################
# Visualization
# Show region extraction results by importing image & plotting utilities
from nilearn import plotting
from nilearn.image import index_img
from nilearn.plotting import find_xyz_cut_coords

# Showing region extraction results using 4D maps visualization tool
plotting.plot_prob_atlas(regions_img, display_mode='z', cut_coords=1,
                         view_type='contours', title="Regions extracted.")

# To reduce the complexity, we choose to display all the regions
# extracted from network 3
import numpy as np

DMN_network = index_img(atlas_networks, 3)
plotting.plot_stat_map(DMN_network, display_mode='z', cut_coords=1,
                       title='Network 3', colorbar=False)

regions_indices_network3 = np.where(np.array(extraction.index_) == 3)
for index in regions_indices_network3[0]:
    cur_img = index_img(extraction.regions_img_, index)
    coords = find_xyz_cut_coords(cur_img)
    plotting.plot_stat_map(cur_img, display_mode='z', cut_coords=coords[2:3],
                           title="Blob of network3", colorbar=False)
    # Grab the extracted components, unmasked back into a Nifti image.
    # Note: for versions older than 0.4.1, components_img_ is not implemented.
    # See the note section above for details.
    components_img = estimator.components_img_
    components_img.to_filename('%s_resting_state.nii.gz' %
                               names[estimator])
    components_imgs.append(components_img)

###############################################################################
# Visualize the results
# ----------------------
from nilearn.plotting import (plot_prob_atlas, find_xyz_cut_coords, show,
                              plot_stat_map)
from nilearn.image import index_img

# Selecting specific maps to display: maps were manually chosen to be similar
indices = {dict_learning: 25, canica: 33}
# We select relevant cut coordinates for displaying
cut_component = index_img(components_imgs[0], indices[dict_learning])
cut_coords = find_xyz_cut_coords(cut_component)
for estimator, components in zip(estimators, components_imgs):
    # 4D plotting
    plot_prob_atlas(components, view_type="filled_contours",
                    title="%s" % names[estimator],
                    cut_coords=cut_coords, colorbar=False)
    # 3D plotting
    plot_stat_map(index_img(components, indices[estimator]),
                  title="%s" % names[estimator],
                  cut_coords=cut_coords, colorbar=False)
show()
Example #46
canica.fit(func_filenames)

# Retrieve the independent components in brain space. Directly
# accessible through the attribute `components_img_`.
canica_components_img = canica.components_img_
# components_img is a Nifti Image object, and can be saved to a file with
# the following line:
canica_components_img.to_filename('canica_resting_state.nii.gz')


####################################################################
# To visualize we plot the outline of all components on one figure
from nilearn.plotting import plot_prob_atlas

# Plot all ICA components together
plot_prob_atlas(canica_components_img, title='All ICA components')


####################################################################
# Finally, we plot the map for each ICA component separately
from nilearn.image import iter_img
from nilearn.plotting import plot_stat_map, show

for i, cur_img in enumerate(iter_img(canica_components_img)):
    plot_stat_map(cur_img, display_mode="z", title="IC %d" % i,
                  cut_coords=1, colorbar=False)


####################################################################
# Compare CanICA to dictionary learning
# -------------------------------------------------------------