Example #1
0
def make_html(stat_map=None,
              mesh=None,
              threshold=None,
              cmap=plotting.cm.cold_hot):
    """Build a standalone HTML page displaying *stat_map* on a brain surface.

    Parameters
    ----------
    stat_map : str or Niimg-like, optional
        Statistical map to display; when None, the localizer button-task
        t-map is fetched and used instead.
    mesh, threshold, cmap
        Forwarded unchanged to ``full_brain_info``.

    Returns
    -------
    str
        The HTML template with the JSON payload and colorscale injected.
    """
    if stat_map is None:
        # Fall back to a public example map shipped with nilearn.
        stat_map = datasets.fetch_localizer_button_task()['tmaps'][0]
    brain_info, colorscale = full_brain_info(stat_map=stat_map,
                                             mesh=mesh,
                                             threshold=threshold,
                                             cmap=cmap)
    page = HTML_TEMPLATE.replace('INSERT_STAT_MAP_JSON_HERE',
                                 json.dumps(brain_info))
    return page.replace('INSERT_COLORSCALE_HERE', colorscale)
def anat_preproc(filename):
    """Resample a t-map onto the MNI152 template and plot both versions.

    Parameters
    ----------
    filename : str or Niimg-like
        Path to the statistical (t-map) image to resample.

    Side effects: downloads the localizer button-task dataset, prints
    shape/affine comparisons, and opens two matplotlib figures.
    """
    # Imports are kept local so the module can be imported without nilearn.
    from nilearn.datasets import fetch_localizer_button_task
    from nilearn.datasets import load_mni152_template
    from nilearn import plotting
    from nilearn.image import resample_to_img
    from nilearn.image import load_img

    template = load_mni152_template()
    localizer_dataset = fetch_localizer_button_task(get_anats=True)
    localizer_tmap_filename = localizer_dataset.tmaps[0]
    localizer_anat_filename = localizer_dataset.anats[0]

    # Bring the input map onto the template's voxel grid.
    resampled_localizer_tmap = resample_to_img(filename, template)

    tmap_img = load_img(filename)
    original_shape = tmap_img.shape
    original_affine = tmap_img.affine
    resampled_shape = resampled_localizer_tmap.shape
    resampled_affine = resampled_localizer_tmap.affine
    template_img = load_img(template)
    template_shape = template_img.shape
    template_affine = template_img.affine
    print("""Shape comparison:
	-Original t-map image shape: {0}
	-Resampled t-map image shape: {1}
	-Template image shape: {2}
	""".format(original_shape, resampled_shape, template_shape))
    # BUG FIX: the resampled affine previously used placeholder {0}, which
    # re-printed the original affine; its label also omitted "affine".
    print("""Affine comparison:
	-Original t-map image affine:\n{0}
	-Resampled t-map image affine:\n{1}
	-Template image affine:\n{2}
	""".format(original_affine, resampled_affine, template_affine))

    plotting.plot_stat_map(localizer_tmap_filename,
                           bg_img=localizer_anat_filename,
                           cut_coords=(36, -27, 66),
                           threshold=3,
                           title="t-map on original anat")
    plotting.plot_stat_map(resampled_localizer_tmap,
                           bg_img=template,
                           cut_coords=(36, -27, 66),
                           threshold=3,
                           title="Resampled t-map on MNI template anat")
    plotting.show()
"""
Glass brain plotting in nilearn (all options)
=============================================

This example goes through different options of the :func:`nilearn.plotting.plot_glass_brain` function
(including plotting negative values).

See :ref:`plotting` for more plotting functionalities.
"""


###############################################################################
# Retrieve the data
from nilearn import datasets

button_task = datasets.fetch_localizer_button_task()
tmap_path = button_task.tmaps[0]

###############################################################################
# Demo glass brain plotting.
from nilearn import plotting

# Whole-brain sagittal projection, thresholded at 3
plotting.plot_glass_brain(tmap_path, threshold=3)


###############################################################################
# The same view, this time with a colorbar
plotting.plot_glass_brain(tmap_path, threshold=3, colorbar=True)

See :ref:`plotting` for more details.
"""

###############################################################################
# First, we retrieve data from nilearn-provided (general-purpose) datasets

from nilearn import datasets

# haxby dataset: anatomical image, EPI images and ventral-temporal masks
haxby_dataset = datasets.fetch_haxby()
haxby_anat_filename = haxby_dataset.anat[0]
haxby_mask_filename = haxby_dataset.mask_vt[0]
haxby_func_filename = haxby_dataset.func[0]

# localizer dataset: subject-level contrast maps (with anatomical images)
localizer_dataset = datasets.fetch_localizer_button_task(get_anats=True)
localizer_anat_filename = localizer_dataset.anats[0]
localizer_tmap_filename = localizer_dataset.tmaps[0]

########################################
# Now we show how to visualize the retrieved datasets using plotting
# tools from nilearn.

from nilearn import plotting

########################################
# Visualizing a contrast map in three orthogonal views - 'sagittal',
# 'coronal' and 'axial' - with the cut coordinates of interest given
# manually, and a colorbar on the right side of the plots.

# The first argument is the path to the filename of a contrast map,
"""

######################################################################
import warnings
warnings.simplefilter('ignore')

######################################################################
# Statistical maps
# ================

######################################################################
# Download an individual-level statistical map and plot it on the
# subject's own T1 image:

from nilearn import datasets, plotting
button_task = datasets.fetch_localizer_button_task()
plotting.view_img(button_task['tmap'], bg_img=button_task['anat'],
                  threshold='97%')


######################################################################
# Download and plot a group-level statistical map:

motor_img = datasets.fetch_neurovault_motor_task()['images'][0]
plotting.view_img(motor_img, threshold='95%')


######################################################################
# More about dataset downloaders: https://nilearn.github.io/modules/reference.html#module-nilearn.datasets
#
# More about plotting: https://nilearn.github.io/plotting/index.html
Example #6
0
def load_fsaverage(data_dir='/home/jerome/workspace/scratch/fsaverage'):
    """Return paths to the fsaverage surface mesh files.

    The base directory was previously hard-coded to one developer's home;
    it is now a parameter whose default preserves the old behavior, so
    existing callers are unaffected.

    Parameters
    ----------
    data_dir : str, optional
        Directory containing the GIfTI mesh files.

    Returns
    -------
    dict
        Maps mesh names ('pial_left', 'infl_left', 'pial_right',
        'infl_right') to the corresponding .gii file paths.
    """
    mesh_files = {
        'pial_left': 'pial_left.gii',
        'infl_left': 'inflated_left.gii',
        'pial_right': 'pial_right.gii',
        'infl_right': 'inflated_right.gii',
    }
    # '/' join (not os.path.join) keeps the returned strings byte-identical
    # to the original hard-coded paths on every platform.
    return {name: '{}/{}'.format(data_dir, fname)
            for name, fname in mesh_files.items()}


if __name__ == '__main__':
    # Command-line entry point: render the surface plot into a standalone
    # HTML file whose path is configurable via --out_file.
    parser = argparse.ArgumentParser()
    parser.add_argument('--out_file',
                        type=str,
                        default='surface_plot_standalone.html')
    cli_args = parser.parse_args()

    # fsaverage = datasets.fetch_surf_fsaverage5()
    fsaverage = load_fsaverage()
    stat_map = datasets.fetch_localizer_button_task()['tmaps'][0]
    replacements = to_three(fsaverage['pial_right'], stat_map,
                            fsaverage['pial_right'])
    # Substitute every placeholder in the template with its JSON payload.
    page = HTML_TEMPLATE
    for placeholder, payload in replacements.items():
        page = page.replace(placeholder, payload)
    with open(cli_args.out_file, 'w') as out:
        out.write(page)
###############################################################################
# Retrieve the data
# ------------------
#
# Nilearn comes with a set of functions that download public data from the
# Internet.
#
# Let us first see where the data will be downloaded and stored on our disk:
#
from nilearn import datasets
print('Datasets shipped with nilearn are stored in: %r' %
      datasets.get_data_dirs())

###############################################################################
# Let us now retrieve a motor task contrast map corresponding to the second
# subject from a localizer experiment
tmap_filenames = datasets.fetch_localizer_button_task()['tmaps']
print(tmap_filenames)

###############################################################################
# tmap_filenames is returned as a list. We need to take the first one
tmap_filename = tmap_filenames[0]

###############################################################################
# Demo glass brain plotting
# --------------------------
from nilearn import plotting

# Whole-brain sagittal cuts; the map is thresholded at 3
plotting.plot_glass_brain(tmap_filename, threshold=3)

###############################################################################
Here we discover how to work with 3D and 4D niimgs.
"""

###############################################################################
# Downloading tutorial datasets from the Internet
# ------------------------------------------------
#
# Nilearn comes with functions that download public data from the Internet.
#
# Let's first check where the data is downloaded on our disk:
from nilearn import datasets
print('Datasets are stored in: %r' % datasets.get_data_dirs())

###############################################################################
# Let's now retrieve a motor contrast map from a localizer experiment
tmap_filenames = datasets.fetch_localizer_button_task()['tmaps']
print(tmap_filenames)

###############################################################################
# tmap_filenames is a list of filenames. We need to take the first one
tmap_filename = tmap_filenames[0]


###############################################################################
# Visualizing a 3D file
# ----------------------
#
# The file contains a 3D volume; we can easily visualize it as a
# statistical map:
from nilearn import plotting
plotting.plot_stat_map(tmap_filename)
*dim* modifies the contrast of this image: dim=0 leaves the image
unchanged, negative values of *dim* enhance it, and positive values
decrease it (dim the background).

This *dim* argument may also be useful for the plot_roi function used to
display ROIs on top of a background image.

"""

#########################################################################
# Retrieve the data: the localizer dataset with contrast maps
# ------------------------------------------------------------

from nilearn import datasets

button_task = datasets.fetch_localizer_button_task(legacy_format=False)
# Contrast map of the motor task
tmap_file = button_task.tmap
# Subject-specific anatomical image
anat_file = button_task.anat

###########################################################################
# Plotting with enhancement of background image with dim=-.5
# --------------------------------------------------------------------------

from nilearn import plotting
plotting.plot_stat_map(tmap_file,
                       bg_img=anat_file,
                       cut_coords=(36, -27, 66),
                       threshold=3, title="dim=-.5",
                       dim=-.5)
See :ref:`plotting` for more details.
"""

###############################################################################
# First, we retrieve data from nilearn-provided (general-purpose) datasets

from nilearn import datasets

# haxby dataset: anatomical image, EPI images and ventral-temporal masks
# NOTE(review): `n_subjects` is deprecated in favour of `subjects` in
# newer nilearn releases — confirm against the pinned nilearn version.
haxby_dataset = datasets.fetch_haxby(n_subjects=1)
haxby_anat_filename = haxby_dataset.anat[0]
haxby_mask_filename = haxby_dataset.mask_vt[0]
haxby_func_filename = haxby_dataset.func[0]

# localizer dataset: subject-level contrast maps (with anatomical images)
localizer_dataset = datasets.fetch_localizer_button_task(get_anats=True)
localizer_anat_filename = localizer_dataset.anats[0]
localizer_tmap_filename = localizer_dataset.tmaps[0]

########################################
# Now we show how to visualize the retrieved datasets using plotting
# tools from nilearn.

from nilearn import plotting

########################################
# Visualizing a contrast map in three orthogonal views - 'sagittal',
# 'coronal' and 'axial' - with the cut coordinates of interest given
# manually, and a colorbar on the right side of the plots.

# The first argument is the path to the filename of a contrast map,
# :func:`nilearn.plotting.view_img_on_surf` that gives more interactive
# visualizations in a web browser. See :ref:`interactive-surface-plotting` for
# more details.

# NOTE(review): `fsaverage`, `texture` and `stat_img` are defined earlier
# in this example, outside this excerpt.
view = plotting.view_surf(fsaverage.infl_right,
                          texture,
                          threshold='90%',
                          bg_map=fsaverage.sulc_right)
# NOTE(review): this call opens a web browser; the original comment said
# "uncomment this", but the line below is live.
view.open_in_browser()

##############################################################################
# In a Jupyter notebook, if ``view`` is the output of a cell, it will
# be displayed below the cell

#view

##############################################################################
# We don't need to do the projection ourselves; we can use view_img_on_surf:

view = plotting.view_img_on_surf(stat_img,
                                 threshold='90%',
                                 surf_mesh='fsaverage')
view.open_in_browser()

#view

from nilearn import plotting, datasets
img = datasets.fetch_localizer_button_task()['tmap']
view = plotting.view_img_on_surf(img, threshold='90%', surf_mesh='fsaverage')
view.open_in_browser()