Example #1
def _plt_src(name, kw_brain_obj, active_data, active_vert, sources,
             kw_source_obj, kw_activation, show):
    # Define a brain object and a source object :
    logger.info('Define a Brain and Source objects')
    from visbrain.objects import BrainObj, SourceObj, SceneObj
    brain_obj, source_obj = name + '_brain', name + '_sources'
    b_obj = BrainObj(brain_obj, **kw_brain_obj)
    s_obj = SourceObj(source_obj, sources, **kw_source_obj)
    s_obj.visible_obj = False
    # Add data to the BrainObj if needed :
    if isinstance(active_data, np.ndarray):
        logger.info("Add active data between "
                    "[%2f, %2f]" % (active_data.min(), active_data.max()))
        b_obj.add_activation(data=active_data,
                             vertices=active_vert,
                             **kw_activation)
    # Return either a scene or a BrainObj and SourceObj :
    if show is True:  # Display inside the Brain GUI
        # Define a Brain instance :
        from visbrain import Brain
        brain = Brain(brain_obj=b_obj, source_obj=s_obj)
        # By default, display colorbar if activation :
        if isinstance(active_data, np.ndarray):
            brain.menuDispCbar.setChecked(True)
            brain._fcn_menu_disp_cbar()
        brain.show()
    elif show == 'scene':  # return a SceneObj
        logger.info('Define a unique scene for the Brain and Source objects')
        sc = SceneObj()
        sc.add_to_subplot(s_obj)
        sc.add_to_subplot(b_obj, use_this_cam=True)
        return sc
    else:  # return the BrainObj and SourceObj
        s_obj.visible_obj = True
        return b_obj, s_obj
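
# Minimal usage sketch of `_plt_src` (the values below are hypothetical; the
# sources argument is an (n_sources, 3) array of MNI coordinates) :
# xyz = 50. * np.random.rand(10, 3)
# sc = _plt_src('subject', {}, None, None, xyz, {}, {}, show='scene')
# sc.preview()
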
c_connect = np.ma.masked_array(c_connect, mask=True)
nz = np.where((c_connect > -5) & (c_connect < 5))
c_connect.mask[nz] = False
kwargs['c_connect'] = c_connect

# ---------------- Time-series ----------------
kwargs['ts_data'] = 100. * np.random.rand(10, 100)
kwargs['ts_select'] = np.ones((10, ), dtype=bool)
kwargs['ts_select'][[3, 4, 7]] = False

# ---------------- Pictures ----------------
kwargs['pic_data'] = 100. * np.random.rand(10, 20, 17)

# ---------------- Application  ----------------
app = QtWidgets.QApplication([])
vb = Brain(**kwargs)


class TestBrain(object):
    """Test brain.py."""

    ###########################################################################
    #                                 SETTINGS
    ###########################################################################
    def test_create_tmp_folder(self):
        """Create tmp folder."""
        if not os.path.exists(path_to_tmp):
            os.makedirs(path_to_tmp)

    @staticmethod
    def _path_to_tmp(name):
"""You can also select a specific hemisphere
"""
# b_obj = BrainObj('B3', translucent=False, hemisphere='left')  # 'right'

"""For the inflated, white and translucent templates, sulcus can be also used
"""
# b_obj = BrainObj('inflated', translucent=False, hemisphere='right',
#                  sulcus=True)

"""Once the brain object created, pass it to the graphical user interface.

If you want to control the brain from the GUI, go to the Objects tab and select
'Brain' from the first combo box. You can also use the key shortcut b to
display/hide the brain.
"""
vb = Brain(brain_obj=b_obj, bgcolor='slateblue')

"""Display opaque right hemisphere of B3 :
"""
# vb.brain_control(template='B3', hemisphere='right', translucent=False)

"""
Fixed frontal view
"""
# vb.rotate(fixed='front')

"""
Display transparent left hemisphere of B2
"""
# vb.brain_control(template='B2', hemisphere='left', alpha=.05)
Example #4
"""
c_connect = 100. * np.random.rand(n_sources, n_sources)
mask = np.logical_and(12.1 <= c_connect, c_connect < 15.23)
c_connect = np.ma.masked_array(c_connect, mask=~mask)
"""
Define random pictures with values between [0, 50]. Each picture has 10 rows
and 20 columns
"""
pic_data = 50. * np.random.rand(n_sources, 10, 20)
"""
Define the Brain instance and pass variables for sources, pictures and
connections
"""
vb = Brain(s_xyz=s_xyz,
           s_data=s_data,
           a_template='B3',
           c_connect=c_connect,
           pic_data=pic_data)
"""
Set the Brain opaque to visualize the cortical projection :
"""
vb.brain_control(transparent=False)
"""
The colormap of the projection can't be controlled before the projection has
been run. So we print the list of controllable objects before and after the
cortical projection. Note that the code below also applies to the
cortical_repartition() method.
"""
print("List of controllable objects before projection : ", vb.cbar_list())
vb.cortical_projection()
print("List of controllable objects after projection : ", vb.cbar_list())
Example #5
See : https://brainder.org/download/flair/

.. image:: ../../picture/picbrain/ex_crossec_and_volume.png
"""
from visbrain import Brain
from visbrain.objects import CrossSecObj, VolumeObj
from visbrain.io import download_file
"""Import the volume and the associated affine transformation
"""
volume_name = 'GG-853-WM-0.7mm.nii.gz'  # 'GG-853-GM-0.7mm.nii.gz'
"""Download the file.
"""
path = download_file(volume_name)
"""Define a cross-section object

Go to the Objects tab and select 'Cross-section' in the combo box. You can also
press x to display the cross-section panel.
"""
cs_obj = CrossSecObj(path, section=(70, 171, 80), cmap='gist_stern')
"""Define a volume object.

Go to the Objects tab and select 'Volume' in the combo box. You can also
press x to display the volume panel.
"""
v_obj = VolumeObj(path)
"""Create the GUI and pass the cross-section and the volume object
"""
vb = Brain(cross_sec_obj=cs_obj, vol_obj=v_obj)
vb.show()
p_obj1 = Picture3DObj('P1', pic_data, xyz_1)
p_obj2 = Picture3DObj('P2', 2 * pic_data, xyz_2)

# ---------------- ROI // Volume // Cross-sections ----------------
# ROI :
roi_obj = RoiObj('brodmann')
roi_obj.select_roi([4, 6])
# Volume :
vol_obj = VolumeObj('aal')
# Cross-sections :
cs_obj = CrossSecObj('aal')
cs_obj.cut_coords((50, 60, 70))

# ---------------- Application  ----------------
vb = Brain(source_obj=[s_obj1, s_obj2], connect_obj=[c_obj, c_obj2],
           time_series_obj=[ts_obj1, ts_obj2], picture_obj=[p_obj1, p_obj2],
           roi_obj=roi_obj, vol_obj=vol_obj, cross_sec_obj=cs_obj,
           verbose='debug')


class TestBrain(_TestVisbrain):
    """Test brain.py."""

    ###########################################################################
    #                                 BRAIN
    ###########################################################################
    def test_scene_rotation(self):
        """Test scene rotations/."""
        rotations = ['axial_0', 'coronal_0', 'sagittal_0',
                     'axial_1', 'coronal_1', 'sagittal_1', 'top', 'bottom',
                     'back', 'front', 'left', 'right']
        customs = [(90., 0.), (-90, 90.), (180., 180.)]
# Randomize the amplitude and the phase of sine :
amp = np.random.randint(2, 20, n_sources).reshape(-1, 1)
pha = np.random.randint(1, 7, n_sources).reshape(-1, 1)
# Build the time series of shape (n_sources, n_time_points) :
ts_data = amp * np.sin(2 * np.pi * pha * time)
ts_data += np.random.randn(n_sources, n_time_points)

# Use a boolean vector to hide/display time-series :
ts_to_mask = [5, 7, 11, 3, 14, 17, 22, 23]
ts_select = np.ones((s_xyz.shape[0], ), dtype=bool)
ts_select[ts_to_mask] = False

# Time-series (TS) graphical properties :
ts_amp = 5.4  # TS graphical amplitude
ts_width = 15.7  # TS graphical width
ts_color = 'orange'  # TS color
ts_dxyz = (1., 2., 5.)  # TS offset along the (x, y, z) axes
ts_lw = 2.2  # TS line-width

b = Brain(s_xyz=s_xyz,
          s_text=s_text,
          ts_data=ts_data,
          s_textsize=s_textsize,
          ts_amp=ts_amp,
          ts_width=ts_width,
          ts_color=ts_color,
          ts_dxyz=ts_dxyz,
          ts_lw=ts_lw,
          ts_select=ts_select)
b.show()
kwargs['s_cmap'] = 'Spectral_r'  # Matplotlib colormap
kwargs['s_clim'] = (-50., 50.)
kwargs['s_vmin'], kwargs['s_vmax'] = None, 21
kwargs['s_under'], kwargs['s_over'] = 'midnightblue', "#e74c3c"

# Colormap properties (for connectivity) :
kwargs['c_cmap'] = 'gnuplot'  # Matplotlib colormap
kwargs['c_vmin'], kwargs['c_vmax'] = umin + 0.2, umax - 0.1
kwargs['c_under'], kwargs['c_over'] = 'green', "white"
kwargs['c_clim'] = [umin, umax]
"""
Finally, use c_colorby to define how connections have to be colored.
If c_colorby is 'count', the colormap is driven by the number of connections
per node. If 'strength', it is driven by the connectivity strength between
two nodes.
"""
kwargs['c_colorby'] = 'strength'
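# As a sketch, the colormap can instead be driven by the number of
# connections per node :
# kwargs['c_colorby'] = 'count'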
kwargs['c_radiusmin'] = 4
kwargs['c_dynamic'] = (0.1, 1)

# Atlas template and opacity :
kwargs['a_template'] = 'B1'

# Set font size, color and label for the colorbar :
kwargs['cb_fontsize'] = 15
kwargs['cb_fontcolor'] = 'white'
kwargs['cb_label'] = 'My colorbar label'

vb = Brain(**kwargs)
vb.show()
# Load the xyz coordinates and corresponding subject name :
mat = np.load('xyz_sample.npz')
kwargs['s_xyz'], subjects = mat['xyz'], mat['subjects']

# Now, create some random data between [0, 50]
kwargs['s_data'] = 50. * np.random.rand(kwargs['s_xyz'].shape[0])
kwargs['s_color'] = 'darkred'
kwargs['s_opacity'] = .5

# Control the dynamic range of sources radius :
kwargs['s_radiusmin'] = 2  # Minimum radius
kwargs['s_radiusmax'] = 8  # Maximum radius
kwargs['s_edgewidth'] = 0.  # Width of the edges

# Create a brain instance :
vb = Brain(**kwargs)
"""First, make a basic screenshot of the scene.

Once exported, you should notice that the image doesn't have a proper size to
be used in a paper. In addition, the image has to be cropped to remove the
unnecessary background.
"""
vb.screenshot('0_main_brain.jpg')

# Run the cortical projection :
vb.cortical_projection(clim=(0, 50),
                       cmap='Spectral_r',
                       vmin=10.1,
                       isvmin=True,
                       under='black',
                       vmax=41.2,
                       over='green')
Example #10
.. image:: ../../picture/picbrain/ex_region_of_interest.png
"""
from __future__ import print_function
import numpy as np

from visbrain import Brain

# Load thalamus sources :
s_xyz = np.loadtxt('thalamus.txt')
# Load alpha power. The Px.npy file contains the power across several time
# windows, so we take the mean across time :
s_data = np.load('Px.npy').mean(1) * 10e26

# Define a Brain instance :
vb = Brain(s_xyz=s_xyz, s_data=s_data, s_cmap='viridis')
# Rotate the brain in axial view :
vb.rotate(fixed='axial_0')
"""
Select the thalamus indices (76 for the left and 77 for the right). If you
don't know the index of your ROI, open the GUI and look at the number in
front of the name, or uncomment the following line :
"""
# print(vb.roi_list('AAL'))
vb.roi_control(selection=[76, 77],
               subdivision='AAL',
               smooth=5,
               name='thalamus')

# Project the source's activity onto ROI directly :
vb.cortical_projection(project_on='thalamus')
idx_thalamus = roi_obj.where_is('Thalamus')
roi_obj.select_roi(idx_thalamus, smooth=5)
"""Once the ROI object created, we can project source's alpha modulations
directly on the thalamus
"""
roi_obj.project_sources(s_obj,
                        cmap='Spectral_r',
                        clim=(200., 2000.),
                        vmin=300.,
                        under='gray',
                        vmax=1800.,
                        over='darkred')
"""You can also force sources to fit onto the thalamus
"""
# s_obj.fit_to_vertices(roi_obj.vertices)
"""
"""
b_obj = BrainObj('B3')

# Define a Brain instance :
vb = Brain(brain_obj=b_obj, source_obj=s_obj, roi_obj=roi_obj)
"""Select the colorbar of the ROI
"""
vb.cbar_select('ROI')
"""Eventualy, take a screenshot
"""
# vb.screenshot('thalamus.png', autocrop=True)

# Show the interface :
vb.show()
"""Parcellize the left hemisphere using the Destrieux Atlas. By default, no
parcellates are selected
"""
b_obj.parcellize(path_to_file1, hemisphere='left')
"""If you want to get the list of all predefined parcellates, use the
`get_parcellates` method which returns a pandas DataFrame with the index, the
name and the color associated to each parcellates
"""
df = b_obj.get_parcellates(path_to_file2)
# print(df)
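"""The returned DataFrame can be inspected with standard pandas calls to see
which parcel names are available (a minimal sketch) :
"""
# print(df.columns)
# print(df.head())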
"""Select only some parcellates. Note that this parcellization is using an
other atlas (Desikan-Killiany atlas)
"""
select = [
    'insula', 'paracentral', 'precentral', 'precuneus', 'frontalpole',
    'temporalpole', 'fusiform', 'cuneus', 'inferiorparietal',
    'inferiortemporal', 'precentral', 'superiorfrontal', 'superiortemporal'
]
"""Instead of using predefined colors inside the annot file, we use some data
"""
data = np.arange(len(select))
b_obj.parcellize(path_to_file2,
                 hemisphere='right',
                 select=select,
                 data=data,
                 cmap='Spectral_r')
"""Finally, pass the brain object to `Brain` and disply the GUI
"""
Brain(brain_obj=b_obj).show()
n = len(b_obj)  # Get the number of vertices

dtype = [('vertices', float, 3), ('normals', float, 3)]  # Arrows dtype
arrows = np.zeros(n, dtype=dtype)  # Empty arrows array
arrows['vertices'] = b_obj.vertices  # Set the vertices
arrows['normals'] = b_obj.normals  # Set the normals

# For the data, we use the distance between 0 and each vertex
data = np.linalg.norm(b_obj.vertices, axis=1)
# We only select vectors with a distance in [60., 60.2]
select = np.logical_and(data >= 60., data <= 60.2)

v_obj3 = VectorObj('v3',
                   arrows,
                   data=data,
                   select=select,
                   line_width=2.,
                   arrow_size=7.,
                   arrow_type='inhibitor_round',
                   antialias=True,
                   cmap='Spectral_r',
                   vmin=60.05,
                   under='gray')
# Finally, re-select both brain hemispheres.
b_obj.hemisphere = 'both'

vb = Brain(brain_obj=b_obj,
           vector_obj=[v_obj1, v_obj2, v_obj3],
           source_obj=[s_obj, s_obj2])
vb.show()
Example #14

def create_connect(xyz, min, max):
    """Create connectivity dataset."""
    # Create a random connection dataset :
    connect = 100. * np.random.rand(len(xyz), len(xyz))
    # Mask the connection array :
    connect = np.ma.masked_array(connect, False)
    # Hide lower triangle :
    connect.mask[np.tril_indices_from(connect.mask)] = True
    # Hide connections that are not between min and max :
    connect.mask[np.logical_or(connect.data < min, connect.data > max)] = True
    return connect


vb = Brain()

# ================ ADD SOURCE OBJECTS ================
# Add left hemisphere sources :
vb.add_sources('sources_L',
               s_xyz=s_xyzL,
               s_symbol='disc',
               s_color='#ab4642',
               s_edgecolor='black',
               s_scaling=True,
               s_data=np.random.normal(size=len(s_xyzL)),
               s_radiusmin=1.,
               s_radiusmax=10.,
               s_opacity=.5)

# Add right / front sources :
Example #15
from visbrain.io import download_file
"""Download and the load the Custom.npz archive. This file contains vertices
and faces of a brain template that is not integrated by default in Visbrain.
"""
mat = np.load(download_file('Custom.npz'))
"""Get vertices and faces from the archive.

In this example, normals are also present in the archive. If you don't have
the normals, the BrainObj will compute them automatically.
"""
vert, faces, norms = mat['vertices'], mat['faces'], mat['normals']
"""Define the brain object
"""
b_obj = BrainObj('Custom', vertices=vert, faces=faces, normals=norms)
"""Then you have two strategies :
* If you are going to use this template a lot and don't want to redefine it
  every times, use `b_obj.save()`. Once the object saved, it can be reloaded
  using its name only `BrainObj('Custom')`
* If you only need it once, the template is temporaly saved and remove once the
  GUI is closed.
"""
# b_obj.save()
b_obj = BrainObj('Custom')
"""Define the GUI and pass the brain template
"""
vb = Brain(brain_obj=b_obj)
vb.show()

# If you want to remove the template :
# b_obj.remove()
Example #16
"""
Display fMRI activation
=======================

Display fMRI activations from a nii.gz file (NiBabel required).

See the original example :

https://pysurfer.github.io/auto_examples/plot_fmri_activation.html#sphx-glr-auto-examples-plot-fmri-activation-py

.. image:: ../../picture/picpysurfer/ex_pysurfer_fmri_activations.png
"""
from visbrain import Brain
from visbrain.objects import BrainObj
from visbrain.io import path_to_visbrain_data, download_file

"""Download file if needed
"""
file_name = 'lh.sig.nii.gz'
download_file(file_name)
file = path_to_visbrain_data(file=file_name)


b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file, clim=(5., 20.), hide_under=5, cmap='viridis',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
from visbrain.objects import VolumeObj, CrossSecObj, SourceObj
from visbrain.io import download_file

"""Download two NIFTI files
"""
path_1 = download_file('GG-853-GM-0.7mm.nii.gz')
path_2 = download_file('GG-853-WM-0.7mm.nii.gz')

"""Define four sources sources and a Source object
"""
s_xyz = np.array([[29.9, -37.3, -19.3],
                  [-5.33, 14.00, 20.00],
                  [25.99, 14.00, 34.66],
                  [0., -1.99, 10.66]])
s_obj = SourceObj('MySources', s_xyz)

"""Define a volume object and a cross-section object
"""
vol_obj = VolumeObj(path_1)
cross_sec_obj = CrossSecObj(path_2)

"""Localize a source in the cross-section object
"""
cross_sec_obj.localize_source(s_xyz[2, :])

"""Define a Brain instance and pass the source, volume and cross-section
object
"""
vb = Brain(source_obj=s_obj, vol_obj=vol_obj, cross_sec_obj=cross_sec_obj)
vb.show()
    * 'axial_0': top view
    * 'axial_1': bottom view
    * 'coronal_0': front
    * 'coronal_1': back view
    * 'sagittal_0': left view
    * 'sagittal_1': right view

Custom rotations consist of a tuple of two floats, respectively for azimuth
and elevation.

.. image:: ../../picture/picbrain/ex_brain_control.png
"""
from visbrain import Brain

# Define the Brain instance :
vb = Brain()
"""
Display opaque B1 :
"""
# vb.brain_control(template='B1', transparent=False)
"""
Display transparent right hemisphere of B3 :
"""
# vb.brain_control(template='B3', hemisphere='right', alpha=.1)
"""
Fixed frontal view
"""
# vb.rotate(fixed='coronal_0')
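"""
Custom rotation, defined as an (azimuth, elevation) tuple of floats
"""
# vb.rotate(custom=(90., 0.))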
"""
Display transparent left hemisphere of B2
"""
.. image:: ../../picture/picbrain/ex_add_nifti.png
"""
from __future__ import print_function
import numpy as np
import os

from visbrain import Brain
from visbrain.io import read_nifti

# Define four sources :
s_xyz = np.array([[29.9, -37.3, -19.3], [-5.33, 14.00, 20.00],
                  [25.99, 14.00, 34.66], [0., -1.99, 10.66]])

# Define a Brain instance :
vb = Brain(s_xyz=s_xyz)

# Print the list of volumes available :
print('Volumes available by default : ', vb.volume_list())
"""
If you don't have access to a Nifti file, download one of the volumes available
at https://brainder.org/download/flair/.
"""
path_to_nifti1 = os.getenv("HOME")  # Directory containing the Nifti file
file_nifti1 = 'GG-853-GM-0.7mm.nii.gz'  # Name of the Nifti file
path1 = os.path.join(path_to_nifti1, file_nifti1)
"""
Load the Nifti file. The read_nifti function loads the data and the
transformation needed to convert the data into MNI space :
"""
data1, header1, tf1 = read_nifti(path1)
Example #20
roi_obj = RoiObj('brodmann')
idx_4_6 = roi_obj.where_is(['BA4', 'BA6'], exact=True)
roi_color = {
    idx_4_6[0]: 'red',  # BA4 in red and BA6 in green
    idx_4_6[1]: 'green'
}
roi_obj.select_roi(idx_4_6,
                   unique_color=True,
                   roi_to_color=roi_color,
                   smooth=7)
"""Create a brain object
"""
b_obj = BrainObj('B1', hemisphere='both', translucent=True)
"""Pass the brain, source and ROI object to the GUI
"""
vb = Brain(brain_obj=b_obj, source_obj=s_obj, roi_obj=roi_obj)
"""Render the scene and save the jpg picture with a 300dpi
"""
save_as = os.path.join(save_pic_path, '0_main_brain.jpg')
vb.screenshot(save_as, dpi=300, print_size=(10, 10), autocrop=True)
"""Project source's activity onto the surface
"""
vb.cortical_projection(clim=(0, 50),
                       cmap='Spectral_r',
                       vmin=10.1,
                       under='black',
                       vmax=41.2,
                       over='green')
vb.sources_control('Sobj', visible=False)  # Hide sources
vb.rotate(custom=(-160., 10.))  # Rotate the brain
vb.brain_control(translucent=False)  # Make the brain opaque
Example #21
"""
umin, umax = 30, 31

# 1 - Using select (0: hide, 1: display):
select = np.zeros_like(connect)
select[(connect > umin) & (connect < umax)] = 1

# 2 - Using masking (True: hide, False: display):
connect = np.ma.masked_array(connect, mask=True)
connect.mask[np.where((connect > umin) & (connect < umax))] = False

print('1 and 2 equivalent :', np.array_equal(select, ~connect.mask + 0))
"""Create the connectivity object :
"""
c_obj = ConnectObj('ConnectObj1',
                   xyz,
                   connect,
                   color_by='strength',
                   dynamic=(.1, 1.),
                   cmap='gnuplot',
                   vmin=umin + .2,
                   vmax=umax - .1,
                   under='red',
                   over='green',
                   clim=(umin, umax),
                   antialias=True)
"""Finally, pass source and connectivity objects to Brain :
"""
vb = Brain(source_obj=s_obj, connect_obj=c_obj)

vb.show()
Example #22
kw_activation = dict(cmap='viridis',
                     hide_under=10000,
                     clim=(active_data.min(), active_data.max()),
                     vmax=20000,
                     over='red')

###############################################################################
# Get the brain and source objects
###############################################################################
# Note that here, we use `show=False`. In that case, a
# :class:`visbrain.objects.BrainObj` and a :class:`visbrain.objects.SourceObj`
# are returned.

b_obj, s_obj = mne_plot_source_space(fif_file,
                                     active_data=active_data,
                                     kw_brain_obj=kw_b_obj,
                                     kw_source_obj=kw_s_obj,
                                     kw_activation=kw_activation,
                                     show=False)

###############################################################################
# Pass the brain and source objects to the :class:`visbrain.Brain` module
###############################################################################
# Note that here, we pass the source object to Brain but set it as not
# visible by default. If you don't need it at all, simply remove
# `source_obj=s_obj`

s_obj.visible_obj = False
brain = Brain(brain_obj=b_obj, source_obj=s_obj)
brain.show()
n_time_points = 700         # Number of time points
n_sources = s_xyz.shape[0]  # Number of sources
time = np.mgrid[0:n_sources, 0:n_time_points][1] / sf  # Time vector
# Randomize the amplitude and the phase of sine :
amp = np.random.randint(2, 20, n_sources).reshape(-1, 1)
pha = np.random.randint(1, 7, n_sources).reshape(-1, 1)
# Build the time series of shape (n_sources, n_time_points) :
ts_data = amp * np.sin(2 * np.pi * pha * time)
ts_data += np.random.randn(n_sources, n_time_points)

# Use a boolean vector to hide/display time-series :
ts_to_mask = [5, 7, 11, 3, 14, 17, 22, 23]
ts_select = np.ones((s_xyz.shape[0],), dtype=bool)
ts_select[ts_to_mask] = False

# Time-series (TS) graphical properties :
ts_amp = 5.4            # TS graphical amplitude
ts_width = 15.7         # TS graphical width
ts_color = 'orange'     # TS color
ts_dxyz = (1., 2., 5.)  # TS offset along the (x, y, z) axes
ts_lw = 2.2             # TS line-width

"""Define the 3-D time-series object
"""
ts = TimeSeries3DObj('Ts1', ts_data, s_xyz, select=ts_select, ts_amp=ts_amp,
                     ts_width=ts_width, line_width=ts_lw, translate=ts_dxyz,
                     color=ts_color)

vb = Brain(time_series_obj=ts, source_obj=s_obj)
vb.show()
===========================================

Load a Graphical User Interface configuration file and retrieve previous
session.

Load this file :

* From the menu File/load/GUI config
* Using the load_config() method

Download configuration file (config.txt) :
https://drive.google.com/open?id=0B6vtJiCQZUBvUm9menhtUzVhS2M

.. image:: ../../picture/picbrain/ex_load_config.png
"""
from visbrain import Brain

# Define an empty Brain instance
vb = Brain()

# Load the configuration file :
vb.load_config('config.txt')

"""
Alternatively, you can use the following method to save a new configuration
file, or use the menu File/Save/GUI config
"""
# vb.save_config('new_config_file.txt')

vb.show()
Example #25
x, y = np.append(x, np.flip(x, 0)), np.append(y, np.flip(y, 1))
time = (x.reshape(-1, 1) + y.reshape(1, -1)) / sf
time = np.tile(time[np.newaxis, ...], (n_sources, 1, 1))
coef = s_xyz[:, 0].reshape(-1, 1, 1) / 2.
data = np.sinc(coef * 2 * np.pi * 1. * time)
data += .2 * np.random.rand(*data.shape)
"""If you want to remove some pictures, define a pic_select array of boolean
values and specify if those pictures has to be hide or displayed :
"""
pic_select = np.ones((n_sources, ), dtype=bool)
pic_to_hide = [2, 6, 10, 11, 31, 35, 41, 44, 51, 55]
pic_select[pic_to_hide] = False
kwargs['select'] = pic_select

kwargs['pic_width'] = 5.  # Width
kwargs['pic_height'] = 5.  # Height
kwargs['translate'] = (4., 5., 1.)  # Offset along (x, y, z) axis
kwargs['clim'] = (.01, 1.12)  # Colorbar limits
kwargs['cmap'] = 'viridis'  # Colormap
kwargs['vmin'] = .1  # Vmin
kwargs['under'] = 'gray'  # Color under vmin
kwargs['vmax'] = .9  # Vmax
kwargs['over'] = '#ab4642'  # Color over vmax
kwargs['cblabel'] = '3D Pictures'  # Colorbar label
"""Define the 3-D picture object
"""
pic = Picture3DObj('Pic', data, s_xyz, **kwargs)

vb = Brain(picture_obj=pic, source_obj=s_obj)
vb.show()
Example #26
kwargs['s_data'] = np.random.rand(N)
kwargs['s_radiusmin'] = 1.
kwargs['s_radiusmax'] = 20.

n_connections = 150
rnd_selection = np.random.randint(0, N**2, n_connections)
connect = np.random.randint(0, 20, (N, N))
mask = np.ones(N**2, dtype=bool)
mask[rnd_selection] = False
mask = mask.reshape(N, N)
connect = np.ma.masked_array(connect, mask=mask)
kwargs['c_connect'] = connect

kwargs['ui_bgcolor'] = 'white'

vb = Brain(**kwargs)

# ============= Sources =============
# Screenshot of the default view :
vb.screenshot('default.png', autocrop=True)

# Hide sources that are not in the brain :
vb.sources_display('inside')
vb.screenshot('inside.png', autocrop=True)

# ============= Connectivity =============
# Color by network density :
vb.sources_display('none')
vb.connect_control(show=True,
                   cmap='magma',
                   colorby='density')
                              'WhiteMatter.npz'))
mat = np.load(path_to_file)

"""
Get the variables needed to define a new template. Vertices are nodes
connected by the faces variable. Normals are vectors orthogonal to the
surface (used for light adaptation according to camera rotations). lr_index
is an array of boolean values which specifies which vertices belong to the
left and right hemispheres. This variable can be set to None.
"""
vert, faces, norm = mat['vertices'], mat['faces'], mat['normals']
lr_index = mat['lr_index']
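"""
As a quick sanity check, the arrays are expected to have one row per vertex
(or per face) and three columns (a sketch based on the description above) :
"""
# assert vert.shape[1] == 3 and faces.shape[1] == 3
# assert norm.shape == vert.shape
# assert lr_index.shape[0] == vert.shape[0]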

"""
Add the template to visbrain. After adding this template, it will always be
accessible unless you use the remove_brain_template() function
"""
add_brain_template('WhiteMatter', vert, faces, norm, lr_index)

"""
Open the interface and select the added template
"""
vb = Brain(a_template='WhiteMatter')
print('Brain templates : ', vb.brain_list())
vb.show()

"""
Finally, and this is not a necessity, remove the template
"""
remove_brain_template('WhiteMatter')
Example #28
def mne_plot_source_estimation(sbj,
                               sbj_dir,
                               fwd_file,
                               stc_file=None,
                               hemisphere='both',
                               parc='aparc',
                               active_data=0,
                               kw_brain_obj={},
                               kw_source_obj={},
                               kw_activation={},
                               show=True):
    """Plot source estimation.

    Parameters
    ----------
    sbj : string
        The subject name.
    sbj_dir : string
        Path to the subject directory.
    fwd_file : string
        The file name of the forward solution, which should end with -fwd.fif
        or -fwd.fif.gz.
    stc_file : string | None
        Path to the *.stc inverse solution file.
    hemisphere : {'left', 'both', 'right'}
        The hemisphere to plot.
    parc : string | 'aparc'
        The parcellation to use, e.g., 'aparc' or 'aparc.a2009s'.
    active_data : array_like, int | 0
        The data to set to vertices. If an stc file is provided and if
        `active_data` is an integer, it describes the time instant at which you
        want to see the activation. Otherwise, `active_data` must be an array
        with the same length as the number of active vertices.
    kw_brain_obj : dict | {}
        Additional inputs to pass to the `BrainObj` class.
    kw_source_obj : dict | {}
        Additional inputs to pass to the `SourceObj` class.
    kw_activation : dict | {}
        Additional inputs to pass to the `BrainObj.add_activation` method.
    show : bool | True
        If True, the window of the `Brain` module is automatically displayed.
        If False, a BrainObj and a SourceObj are returned. Finally, if 'scene',
        a SceneObj is returned.

    Returns
    -------
    b_obj : BrainObj
        A predefined `BrainObj` (if `show=False`)
    s_obj : SourceObj
        A predefined `SourceObj`, hidden by default (if `show=False`)
    """
    # Test that mne is installed and import :
    is_mne_installed(raise_error=True)
    import mne
    from mne.source_space import head_to_mni
    hemi_idx = {'left': [0], 'right': [1], 'both': [0, 1]}[hemisphere]
    # Read the forward solution :
    fwd = mne.read_forward_solution(fwd_file)
    logger.debug('Read the forward solution')
    # Get source space :
    fwd_src = fwd['src']
    # Get the MRI (surface RAS)-> head matrix
    mri_head_t = fwd['mri_head_t']
    # Head to MNI conversion
    logger.info("Head to MNI conversion")
    mesh, sources = [], []
    for hemi in hemi_idx:
        vert_ = fwd_src[hemi]['rr']
        sources_ = fwd_src[hemi]['rr'][fwd_src[hemi]['vertno']]
        m_ = head_to_mni(vert_, sbj, mri_head_t, subjects_dir=sbj_dir)
        s_ = head_to_mni(sources_, sbj, mri_head_t, subjects_dir=sbj_dir)
        mesh.append(m_)
        sources.append(s_)
    # Get active vertices :
    # fwd_src contains the source spaces; the first 2 are the cortex
    # (left and right hemispheres), the others are related to the substructures
    if len(hemi_idx) == 1:
        active_vert = fwd_src[hemi_idx[0]]['vertno']
    else:
        active_left = fwd_src[0]['vertno']
        active_right = fwd_src[1]['vertno'] + mesh[0].shape[0]
        active_vert = np.r_[active_left, active_right]
    logger.info('%i active vertices detected ' % len(active_vert))
    # Add data to the mesh :
    if isinstance(active_data, np.ndarray):
        if len(active_data) != len(active_vert):
            logger.error("The length of `active data` (%i) must be the same "
                         "the length of the number of active vertices "
                         "(%i)" % (len(active_data), len(active_vert)))
            active_data = active_vert = None
        else:
            logger.info("Array of active data used.")
    elif isinstance(stc_file, str) and isinstance(active_data, int):
        # Get active data :
        assert os.path.isfile(stc_file)
        n_tp = active_data
        data = mne.read_source_estimate(stc_file).data
        active_data = np.abs(data[:, n_tp] / data[:, n_tp].max())
        logger.info("Time instant %i used for activation" % n_tp)
    else:
        logger.info("No active data detected.")
        active_data = active_vert = None
    # Concatenate vertices, faces and sources :
    vertices = np.concatenate(mesh)
    lr_index = np.r_[np.ones((len(mesh[0]), )), np.zeros((len(mesh[1]), ))]
    sources = np.concatenate(sources)
    # Get faces :
    if len(hemi_idx) == 1:
        faces = fwd_src[hemi_idx[0]]['tris']
    else:
        _faces_l = fwd_src[0]['tris']
        _faces_r = fwd_src[1]['tris'] + _faces_l.max() + 1
        faces = np.r_[_faces_l, _faces_r].astype(int)
    # Define a brain object and a source object :
    logger.info('Define a Brain and Source objects')
    from visbrain.objects import BrainObj, SourceObj, SceneObj
    b_obj = BrainObj(sbj + '_brain',
                     vertices=vertices,
                     faces=faces,
                     lr_index=lr_index.astype(bool),
                     **kw_brain_obj)
    s_obj = SourceObj(sbj + '_src', sources, visible=False, **kw_source_obj)
    # Add data to the BrainObj if needed :
    if isinstance(active_data, np.ndarray):
        logger.info("Add active data between "
                    "[%2f, %2f]" % (active_data.min(), active_data.max()))
        b_obj.add_activation(data=active_data,
                             vertices=active_vert,
                             **kw_activation)
    # Return either a scene or a BrainObj and SourceObj :
    if show:  # Display inside the Brain GUI
        # Define a Brain instance :
        from visbrain import Brain
        brain = Brain(brain_obj=b_obj, source_obj=s_obj)
        # Remove all brain templates except the one of the subject :
        brain._brain_template.setEnabled(False)
        # By default, display colorbar if activation :
        if isinstance(active_data, np.ndarray):
            brain.menuDispCbar.setChecked(True)
            brain._fcn_menu_disp_cbar()
        brain.show()
    elif show == 'scene':  # return a SceneObj
        logger.info('Define a unique scene for the Brain and Source objects')
        sc = SceneObj()
        sc.add_to_subplot(s_obj)
        sc.add_to_subplot(b_obj, use_this_cam=True)
        return sc
    else:  # return the BrainObj and SourceObj
        return b_obj, s_obj
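
# Minimal usage sketch of `mne_plot_source_estimation` (the subject name and
# file paths below are hypothetical) :
# b_obj, s_obj = mne_plot_source_estimation('subject_01', '/path/to/subjects_dir',
#                                           'subject_01-fwd.fif', show=False)
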
sf = 1024.
n = 50
x, y = np.ogrid[0:n / 2, 0:n / 2]
x, y = np.append(x, np.flip(x, 0)), np.append(y, np.flip(y, 1))
time = (x.reshape(-1, 1) + y.reshape(1, -1)) / sf
time = np.tile(time[np.newaxis, ...], (n_sources, 1, 1))
coef = s_xyz[:, 0].reshape(-1, 1, 1) / 2.
kwargs['pic_data'] = np.sinc(coef * 2 * np.pi * 1. * time)
kwargs['pic_data'] += .2 * np.random.rand(*kwargs['pic_data'].shape)

# If you want to remove some pictures, define a pic_select array of boolean
# values and specify whether each picture has to be hidden or displayed :
pic_select = np.ones((n_sources,), dtype=bool)
pic_to_hide = [2, 6, 10, 11, 31, 35, 41, 44, 51, 55]
pic_select[pic_to_hide] = False
kwargs['pic_select'] = pic_select

kwargs['pic_width'] = 5.            # Width
kwargs['pic_height'] = 5.           # Height
kwargs['pic_dxyz'] = (4., 5., 1.)   # Offset along (x, y, z) axis
kwargs['pic_clim'] = (.01, 1.12)    # Colorbar limits
kwargs['pic_cmap'] = 'viridis'      # Colormap
# kwargs['pic_vmin'] = .1             # Vmin
# kwargs['pic_under'] = 'gray'        # Color under vmin
# kwargs['pic_vmax'] = .9             # Vmax
# kwargs['pic_over'] = '#ab4642'      # Color over vmax

b = Brain(s_xyz=s_xyz, **kwargs)
b.show()
Example #30
                     vertices=vertices,
                     smoothing_steps=5,
                     clim=(7., 21.),
                     hide_under=7.,
                     cmap='viridis')

# Build arrows :
dt = np.dtype([('vertices', float, 3), ('normals', float, 3)])
arrows = np.zeros(len(data), dtype=dt)
arrows['vertices'] = b_obj.vertices[vertices, :]
arrows['normals'] = b_obj.normals[vertices, :]
select = data >= 7.

# Define the vector object :
v_obj = VectorObj('vector',
                  arrows,
                  data=data,
                  inferred_data=True,
                  clim=(7., 21.),
                  antialias=True,
                  cmap='viridis',
                  select=select,
                  line_width=2.,
                  arrow_coef=1.2,
                  dynamic=(.2, 1.))

# Finally, pass the brain and vector object to the Brain module :
vb = Brain(brain_obj=b_obj, vector_obj=v_obj)
vb.rotate('left')
vb.show()