Example #1
from mayavi import mlab
from IPython.display import Image
from mne.viz import plot_alignment, set_3d_view


def plot_registration(info, trans, subject, subjects_dir):
    """Plot the head/MEG coregistration and save a screenshot of it."""
    fig = plot_alignment(info,
                         trans,
                         subject=subject,
                         dig=True,
                         meg=True,
                         subjects_dir=subjects_dir,
                         coord_frame='head')
    set_3d_view(figure=fig, azimuth=135, elevation=80)
    mlab.savefig('/home/senthilp/Desktop/coreg.jpg')
    mlab.show()
    # Return the screenshot so it renders inline when called from a notebook
    return Image(filename='/home/senthilp/Desktop/coreg.jpg', width=500)
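
# A hedged usage sketch with the MNE sample dataset (the string-path
# concatenation assumes an MNE version where data_path() returns a str, and
# note that the screenshot path inside the helper above is hard-coded):
import mne

data_path = mne.datasets.sample.data_path()
subjects_dir = data_path + '/subjects'
raw = mne.io.read_raw_fif(data_path + '/MEG/sample/sample_audvis_raw.fif')
trans = mne.read_trans(data_path + '/MEG/sample/sample_audvis_raw-trans.fif')
plot_registration(raw.info, trans, 'sample', subjects_dir)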
Example #2
"""
=================================
Plotting EEG sensors on the scalp
=================================

In this example, digitized EEG sensor locations are shown on the scalp.
"""
# Author: Eric Larson <*****@*****.**>
#
# License: BSD (3-clause)

import mne
from mne.viz import plot_alignment, set_3d_view

print(__doc__)

data_path = mne.datasets.sample.data_path()
subjects_dir = data_path + '/subjects'
trans = mne.read_trans(data_path + '/MEG/sample/sample_audvis_raw-trans.fif')
raw = mne.io.read_raw_fif(data_path + '/MEG/sample/sample_audvis_raw.fif')
# Plot electrode locations on scalp
fig = plot_alignment(raw.info,
                     trans,
                     subject='sample',
                     dig=False,
                     eeg=['original', 'projected'],
                     meg=[],
                     coord_frame='head',
                     subjects_dir=subjects_dir)
# Set viewing angle
set_3d_view(figure=fig, azimuth=135, elevation=80)
Example #3
#           to coregistration. You can also use the MNE-Python
#           coregistration GUI :func:`mne:mne.gui.coregistration`.
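
# A hedged aside: the GUI referenced in the note above can be launched like
# this (``subjects_dir`` is assumed to be defined earlier in the tutorial;
# the call opens an interactive coregistration window):
mne.gui.coregistration(subject='fsaverage', subjects_dir=subjects_dir)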

plot_kwargs = dict(subjects_dir=subjects_dir,
                   surfaces="brain",
                   dig=True,
                   eeg=[],
                   fnirs=['sources', 'detectors'],
                   show_axes=True,
                   coord_frame='head',
                   mri_fiducials=True)

fig = mne.viz.plot_alignment(trans="fsaverage",
                             subject="fsaverage",
                             **plot_kwargs)
set_3d_view(figure=fig, azimuth=90, elevation=0, distance=1)

# %%
# This is what a head model will look like. If you have an MRI from the
# participant, you can use FreeSurfer to generate the required files.
# For further details on generating FreeSurfer reconstructions see
# :ref:`mne:tut-freesurfer-reconstruction`.
#
# In the figure above you can see the brain in grey. You can also
# see the MRI fiducial positions marked with diamonds.
# The nasion fiducial is marked in green, the left and right
# preauricular points (LPA and RPA) in red and blue respectively.
#
# Next, we simultaneously plot the `fsaverage` head and the data we wish
# to align to this head. This process is called coregistration and is
# described in several MNE-Python tutorials; a minimal sketch follows below.
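
# %%
# A minimal sketch of that combined plot (added here for illustration):
# ``raw`` is assumed to be the already-loaded recording whose digitized
# positions we want to align, and ``plot_kwargs`` is reused from above.

fig = mne.viz.plot_alignment(raw.info,
                             trans="fsaverage",
                             subject="fsaverage",
                             **plot_kwargs)
set_3d_view(figure=fig, azimuth=90, elevation=90, distance=0.6)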
Example #4
# %%
# Project 3D electrodes to a 2D snapshot
# --------------------------------------
#
# Because we have the 3D location of each electrode, we can use the
# :func:`mne.viz.snapshot_brain_montage` function to return a 2D image along
# with the electrode positions on that image. We use this in conjunction with
# :func:`mne.viz.plot_alignment`, which visualizes electrode positions.

import numpy as np
import matplotlib.pyplot as plt
from mne.viz import plot_alignment, set_3d_view, snapshot_brain_montage

# ``info``, ``montage`` and ``subjects_dir`` are assumed to come from the
# surrounding iEEG script (a loaded Raw object and its electrode montage).
fig = plot_alignment(info,
                     subject='sample',
                     subjects_dir=subjects_dir,
                     surfaces=['pial'],
                     meg=False)
set_3d_view(figure=fig, azimuth=200, elevation=70)
xy, im = snapshot_brain_montage(fig, montage)

# Convert from a dictionary to array to plot
xy_pts = np.vstack([xy[ch] for ch in info['ch_names']])

# Define an arbitrary "activity" pattern for viz
activity = np.linspace(100, 200, xy_pts.shape[0])

# This allows us to use matplotlib to create arbitrary 2d scatterplots
fig2, ax = plt.subplots(figsize=(10, 10))
ax.imshow(im)
ax.scatter(*xy_pts.T, c=activity, s=200, cmap='coolwarm')
ax.set_axis_off()
# fig2.savefig('./brain.png', bbox_inches='tight')  # For ClickableImage
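
# A hedged sketch of the ClickableImage workflow hinted at above (added for
# illustration; it assumes './brain.png' was written by the commented-out
# savefig line):
from mne.viz import ClickableImage

im_saved = plt.imread('./brain.png')
click = ClickableImage(im_saved)  # click on the image to record positions
click.plot_clicks()               # review the clicked locations
print(click.coords)               # list of (x, y) pixel coordinates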
Example #5
import numpy as np

import mne
from mne.viz import plot_alignment, set_3d_view


def save_images(subject_, montage_fname_):
    """Save screenshots of a digitized montage aligned to a subject's head."""
    montage = mne.channels.read_dig_fif(montage_fname_)
    info = mne.create_info(ch_names=montage.ch_names,
                           ch_types=['eeg'] * len(montage.ch_names),
                           sfreq=100.0)
    info.set_montage(montage)
    trans = mne.channels.compute_native_head_t(montage)

    fig_ = plot_alignment(info,
                          eeg='projected',
                          show_axes=False,
                          trans=trans,
                          surfaces={"head": 1.0},
                          coord_frame='mri',
                          subject=subject_)
    fig_.plotter.off_screen = True

    set_3d_view(figure=fig_, azimuth=135, elevation=80, distance=0.4)
    fig_.plotter.screenshot(subject_ + "_1.png")

    set_3d_view(figure=fig_, azimuth=45, elevation=80, distance=0.4)
    fig_.plotter.screenshot(subject_ + "_2.png")

    set_3d_view(figure=fig_, azimuth=270, elevation=80, distance=0.4)
    fig_.plotter.screenshot(subject_ + "_3.png")

    fig_ = plot_alignment(info,
                          eeg=['original', 'projected'],
                          show_axes=True,
                          trans=trans,
                          surfaces="head",
                          coord_frame='mri',
                          dig=True,
                          subject=subject_)
    fig_.plotter.off_screen = True

    set_3d_view(figure=fig_,
                azimuth=135,
                elevation=80,
                distance=0.5,
                focalpoint=np.linalg.inv(trans["trans"])[:3, 3])
    fig_.plotter.screenshot(subject_ + "_clear_1.png")

    set_3d_view(figure=fig_,
                azimuth=45,
                elevation=80,
                distance=0.5,
                focalpoint=np.linalg.inv(trans["trans"])[:3, 3])
    fig_.plotter.screenshot(subject_ + "_clear_2.png")

    set_3d_view(figure=fig_,
                azimuth=270,
                elevation=80,
                distance=0.5,
                focalpoint=np.linalg.inv(trans["trans"])[:3, 3])
    fig_.plotter.screenshot(subject_ + "_clear_3.png")
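
# A hedged usage sketch: the subject and digitization file names below are
# hypothetical placeholders, and SUBJECTS_DIR is assumed to point at a
# FreeSurfer subjects directory so plot_alignment can find the head surfaces.
save_images('fsaverage', 'my_montage-dig.fif')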
Example #6
import mne
from mne.minimum_norm import read_inverse_operator
from mne.viz import set_3d_view

data_path = mne.datasets.sample.data_path()
subjects_dir = data_path + '/subjects'
fname_trans = data_path + '/MEG/sample/sample_audvis_raw-trans.fif'

inv_fname = data_path
inv_fname += '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'

inv = read_inverse_operator(inv_fname)

print("Method: %s" % inv['methods'])
print("fMRI prior: %s" % inv['fmri_prior'])
print("Number of sources: %s" % inv['nsource'])
print("Number of channels: %s" % inv['nchan'])

src = inv['src']  # get the source space

# Get access to the triangulation of the cortex

print("Number of vertices on the left hemisphere: %d" % len(src[0]['rr']))
print("Number of triangles on left hemisphere: %d" % len(src[0]['use_tris']))
print("Number of vertices on the right hemisphere: %d" % len(src[1]['rr']))
print("Number of triangles on right hemisphere: %d" % len(src[1]['use_tris']))

###############################################################################
# Show result on 3D source space

fig = mne.viz.plot_alignment(subject='sample',
                             subjects_dir=subjects_dir,
                             trans=fname_trans,
                             surfaces='white',
                             src=src)
set_3d_view(fig, focalpoint=(0., 0., 0.06))