Example #1
def _plt_src(name, kw_brain_obj, active_data, active_vert, sources,
             kw_source_obj, kw_activation, show):
    # Define a brain object and a source object :
    logger.info('Define a Brain and Source objects')
    from visbrain.objects import BrainObj, SourceObj, SceneObj
    brain_obj, source_obj = name + '_brain', name + '_sources'
    b_obj = BrainObj(brain_obj, **kw_brain_obj)
    s_obj = SourceObj(source_obj, sources, **kw_source_obj)
    s_obj.visible_obj = False
    # Add data to the BrainObj if needed :
    if isinstance(active_data, np.ndarray):
        logger.info("Add active data between "
                    "[%2f, %2f]" % (active_data.min(), active_data.max()))
        b_obj.add_activation(data=active_data,
                             vertices=active_vert,
                             **kw_activation)
    # Return either a scene or a BrainObj and SourceObj :
    if show is True:  # Display inside the Brain GUI
        # Define a Brain instance :
        from visbrain import Brain
        brain = Brain(brain_obj=b_obj, source_obj=s_obj)
        # By default, display colorbar if activation :
        if isinstance(active_data, np.ndarray):
            brain.menuDispCbar.setChecked(True)
            brain._fcn_menu_disp_cbar()
        brain.show()
    elif show == 'scene':  # return a SceneObj
        logger.info('Define a unique scene for the Brain and Source objects')
        sc = SceneObj()
        sc.add_to_subplot(s_obj)
        sc.add_to_subplot(b_obj, use_this_cam=True)
        return sc
    else:  # return the BrainObj and SourceObj
        s_obj.visible_obj = True
        return b_obj, s_obj
Example #2
def visbrain_plot(mesh, tex=None, caption=None, cblabel=None, visb_sc=None,
                  cmap='jet'):
    """
    Visualize a trimesh object using visbrain core plotting tool
    :param mesh: trimesh object
    :param tex: numpy array of a texture to be visualized on the mesh
    :return:
    """
    from visbrain.objects import BrainObj, ColorbarObj, SceneObj
    b_obj = BrainObj('gui', vertices=np.array(mesh.vertices),
                     faces=np.array(mesh.faces),
                     translucent=False)
    if not isinstance(visb_sc, SceneObj):
        visb_sc = SceneObj(bgcolor='black', size=(1000, 1000))
    # identify (row, col)
    row, _ = get_visb_sc_shape(visb_sc)
    visb_sc.add_to_subplot(b_obj, row=row, col=0, title=caption)

    if tex is not None:
        b_obj.add_activation(data=tex, cmap=cmap,
                             clim=(np.min(tex), np.max(tex)))
        CBAR_STATE = dict(cbtxtsz=20, txtsz=20., width=.1, cbtxtsh=3.,
                          rect=(-.3, -2., 1., 4.), cblabel=cblabel)
        cbar = ColorbarObj(b_obj, **CBAR_STATE)
        visb_sc.add_to_subplot(cbar, row=row, col=1, width_max=200)

    return visb_sc
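# --- Hedged usage sketch (added; not part of the original example) ---
# Assumes a surface loaded with the trimesh package and a per-vertex texture
# of matching length; 'mesh.obj' is a placeholder path, and the helper
# get_visb_sc_shape must be importable next to visbrain_plot.
import numpy as np
import trimesh

mesh = trimesh.load('mesh.obj')               # hypothetical mesh file
tex = np.random.rand(len(mesh.vertices))      # one value per vertex
visb_sc = visbrain_plot(mesh, tex=tex, caption='Random texture',
                        cblabel='Value', cmap='viridis')
visb_sc.preview()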
Example #3
 def test_get_parcellates(self):
     """Test function get_parcellates."""
     # Prepare the brain :
     b_obj = BrainObj('inflated')
     import pandas as pd
     file_1 = self.need_file(NEEDED_FILES['ANNOT_FILE_1'])
     file_2 = self.need_file(NEEDED_FILES['ANNOT_FILE_2'])
     df_1 = b_obj.get_parcellates(file_1)
     df_2 = b_obj.get_parcellates(file_2)
     assert all([isinstance(k, pd.DataFrame) for k in [df_1, df_2]])
Example #4
 def test_supported_format(self):
     """Test for input formats."""
     for k in ['X3D_FILE', 'GII_FILE', 'OBJ_FILE']:
         file = self.need_file(NEEDED_FILES[k])
         BrainObj(file)
     # Test Freesurfer files
     _lh = self.need_file(NEEDED_FILES['LH_FREESURFER'])
     _rh = self.need_file(NEEDED_FILES['RH_FREESURFER'])
     BrainObj(_lh)
     BrainObj(_rh)
     BrainObj((_lh, _rh))
Example #5
def plot_meg_connectome():
    '''
    Plot the MEG brain connectome for the master figure in MEG paper

    '''
    megdata = sio.loadmat('MEG_data_info/total_connecivtiy_coord_label.mat')
    total_con = megdata['connectivity']
    xyz = megdata['ROI_coords']
    normal_con = total_con[:53]
    smci_con = total_con[53:-28]
    pmci_con = total_con[-28:]
    edges = smci_con[8, :, :]

    sc = SceneObj(bgcolor='black')
    c_obj = ConnectObj('default',
                       xyz,
                       edges,
                       select=edges > .026,
                       line_width=3.,
                       dynamic=(0., 1.),
                       dynamic_orientation='center',
                       cmap='bwr',
                       color_by='strength')
    s_obj = SourceObj('sources', xyz, color='red', radius_min=15.)
    cb_obj = ColorbarObj(c_obj, cblabel='Edge strength')

    sc.add_to_subplot(c_obj, title='MEG brain network')
    sc.add_to_subplot(s_obj)
    sc.add_to_subplot(BrainObj('B3'), use_this_cam=True)
    sc.add_to_subplot(cb_obj, col=1, width_max=200)
    sc.preview()
Example #6
def texture_plot(mesh,
                 tex=None,
                 caption=None,
                 cblabel=None,
                 visb_sc=None,
                 cmap='gnuplot'):
    """
    Projecting Texture onto trimesh object using visbrain core plotting tool
    :param mesh: trimesh object
    :param tex: numpy array of a texture to be visualized
    :return: 0
    """

    b_obj = BrainObj('gui',
                     vertices=np.array(mesh.vertices),
                     faces=np.array(mesh.faces),
                     translucent=False)
    if visb_sc is None:
        visb_sc = SceneObj(bgcolor='black', size=(1400, 1000))
        visb_sc.add_to_subplot(b_obj, title=caption)
        visb_sc_shape = (1, 1)
    else:
        visb_sc_shape = get_visb_sc_shape(visb_sc)
        visb_sc.add_to_subplot(b_obj,
                               row=visb_sc_shape[0] - 1,
                               col=visb_sc_shape[1],
                               title=caption)

    if tex is not None:
        b_obj.add_activation(data=tex,
                             cmap=cmap,
                             clim=(np.min(tex), np.max(tex)))
        CBAR_STATE = dict(cbtxtsz=20,
                          txtsz=20.,
                          width=.1,
                          cbtxtsh=3.,
                          rect=(-.3, -2., 1., 4.),
                          cblabel=cblabel)
        cbar = ColorbarObj(b_obj, **CBAR_STATE)
        visb_sc.add_to_subplot(cbar,
                               row=visb_sc_shape[0] - 1,
                               col=visb_sc_shape[1] + 1,
                               width_max=200)
    return visb_sc
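# --- Hedged usage sketch (added; not part of the original example) ---
# Shows how the visb_sc argument lets several textured meshes share one scene;
# mesh, tex1 and tex2 are assumed to be a trimesh object and two per-vertex
# arrays, and get_visb_sc_shape must be importable next to texture_plot.
visb_sc = texture_plot(mesh, tex=tex1, caption='Texture 1', cblabel='t1')
visb_sc = texture_plot(mesh, tex=tex2, caption='Texture 2', cblabel='t2',
                       visb_sc=visb_sc)
visb_sc.preview()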
Example #7
 def test_update_cbar_from_obj(self):
     """Test function update_cbar_from_obj."""
     xyz = np.random.rand(10, 3)
     edges = np.random.rand(10, 10)
     s_obj = SourceObj('S1', xyz)
     b_obj = BrainObj('B1')
     c_obj = ConnectObj('C1', xyz, edges)
     im_obj = ImageObj('IM1', np.random.rand(10, 10))
     ColorbarObj(s_obj)
     ColorbarObj(c_obj)
     ColorbarObj(b_obj)
     ColorbarObj(im_obj)
Example #8
 def test_parcellize(self):
     """Test function parcellize."""
     b_obj = BrainObj('inflated')
     file_1 = self.need_file(NEEDED_FILES['PARCELLATES_1'])
     file_2 = self.need_file(NEEDED_FILES['PARCELLATES_2'])
     b_obj.parcellize(file_1, hemisphere='left')
     select = ['insula', 'paracentral', 'precentral']
     data = np.arange(len(select))
     b_obj.parcellize(file_2, select=select, data=data, cmap='Spectral_r')
Example #9
def _plt_src(name, kw_brain_obj, active_data, active_vert, sources,
             kw_source_obj, kw_activation, show):
    # Define a brain object and a source object :
    logger.info('    Define a Brain and Source objects')
    from visbrain.objects import BrainObj, SourceObj, SceneObj
    brain_obj, source_obj = name + '_brain', name + '_sources'
    b_obj = BrainObj(brain_obj, **kw_brain_obj)
    s_obj = SourceObj(source_obj, sources, **kw_source_obj)
    s_obj.visible_obj = False
    # Add data to the BrainObj if needed :
    if isinstance(active_data, np.ndarray):
        logger.info("    Add active data between "
                    "[%2f, %2f]" % (active_data.min(), active_data.max()))
        b_obj.add_activation(data=active_data, vertices=active_vert,
                             **kw_activation)
    # Return either a scene or a BrainObj and SourceObj :
    if show is True:  # Display inside the Brain GUI
        # Define a Brain instance :
        from visbrain.gui import Brain
        brain = Brain(brain_obj=b_obj, source_obj=s_obj)
        brain._brain_template.setEnabled(False)
        # By default, display colorbar if activation :
        if isinstance(active_data, np.ndarray):
            brain.menuDispCbar.setChecked(True)
            brain._fcn_menu_disp_cbar()
        brain.show()
    elif show == 'scene':  # return a SceneObj
        logger.info("    Define a unique scene for the Brain and Source "
                    "objects")
        sc = SceneObj()
        sc.add_to_subplot(s_obj)
        sc.add_to_subplot(b_obj, use_this_cam=True)
        return sc
    else:  # return the BrainObj and SourceObj
        s_obj.visible_obj = True
        return b_obj, s_obj
Example #10
def create_scene():
    sc = SceneObj(size=(1400, 1000))

    # Create six brain objects; each one gets its own rotation in the
    # add_to_subplot calls below
    brain_objs = []
    for _ in range(6):
        brain_objs.append(BrainObj(name='inflated', hemisphere='both',
                                   translucent=False, cbtxtsz=10.,
                                   verbose=None))

    sc.add_to_subplot(brain_objs[0], row=0, col=0, rotate='right',
                      title='Right', zoom=3.5)
    sc.add_to_subplot(brain_objs[1], row=0, col=1, rotate='left', title='Left')
    sc.add_to_subplot(brain_objs[2], row=1, col=0, rotate='top', title='Top')
    sc.add_to_subplot(brain_objs[3], row=1, col=1, rotate='bottom',
                      title='Bottom')
    sc.add_to_subplot(brain_objs[4], row=2, col=0, rotate='front',
                      title='Front')
    sc.add_to_subplot(brain_objs[5], row=2, col=1, rotate='back', title='Back')
    return sc, brain_objs
Example #11
def add_activation(areas_labels, brain_objs, annot_file_R, annot_file_L):
    # Split the labels according to their hemisphere suffix ('R' / 'L')
    right_areas = []
    left_areas = []
    for area in areas_labels:
        if area[-1] == 'R':
            right_areas.append(area)
        elif area[-1] == 'L':
            left_areas.append(area)

    # Re-create each brain object and parcellize the selected areas on it
    for i in range(len(brain_objs)):
        brain_objs[i] = BrainObj('inflated', hemisphere='both',
                                 translucent=False)
        if len(left_areas) > 0:
            brain_objs[i].parcellize(annot_file_L, hemisphere='left',
                                     select=left_areas)
        if len(right_areas) > 0:
            brain_objs[i].parcellize(annot_file_R, hemisphere='right',
                                     select=right_areas)
Example #12
def create_brain_obj(annot_file_R, annot_file_L, areas):
    brain_obj = BrainObj(name='inflated', hemisphere='both', translucent=False,
                         cbtxtsz=10., verbose=None)
    # Alternative colorbar kwargs: cblabel='Parcellates example', cbtxtsz=4.
    left_areas = []
    right_areas = []

    for area in areas:
        if area[-1] == 'R':
            right_areas.append(area)
        elif area[-1] == 'L':
            left_areas.append(area)

    if len(left_areas) > 0:
        brain_obj.parcellize(annot_file_L, hemisphere='left',
                             select=left_areas)

    if len(right_areas) > 0:
        brain_obj.parcellize(annot_file_R, hemisphere='right',
                             select=right_areas)

    return brain_obj
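# --- Hedged usage sketch (added; not part of the original example) ---
# The annotation files are downloaded as in the other examples on this page;
# the area labels are placeholders: they must end with 'L' or 'R' (that suffix
# drives the hemisphere routing above) and exist in the annotation files.
from visbrain.io import download_file
from visbrain.objects import SceneObj

annot_L = download_file('lh.aparc.annot', astype='example_data')
annot_R = download_file('rh.aparc.annot', astype='example_data')
areas = ['precentral_L', 'insula_R']          # hypothetical labels
b_obj = create_brain_obj(annot_R, annot_L, areas)
sc = SceneObj(size=(800, 600))
sc.add_to_subplot(b_obj, rotate='top')
sc.preview()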
Example #13
def visbrain_plot(mesh, tex=None):
    """
    Visualize a trimesh object using visbrain core plotting tool
    :param mesh: trimesh object
    :param tex: numpy array of a texture to be visualized on the mesh
    :return:
    """
    from visbrain.objects import BrainObj

    # invert_normals = True -> Light outside
    # invert_normals = False -> Light inside
    b_obj = BrainObj('gui',
                     vertices=mesh.vertices,
                     faces=mesh.faces,
                     translucent=False,
                     invert_normals=True)
    if tex is not None:
        b_obj.add_activation(data=tex, cmap='viridis')
    b_obj.preview(bgcolor='white')
Example #14
                   arrow_type='triangle_60')
s_obj = SourceObj('s1', xyz[sl_1, :], color='white', data=data, radius_min=10.)

"""The second vector object is the symetric of the first one but this time the
data are inferred from the norm of each vector.
"""
v_obj2 = VectorObj('v2', [-arrow_start, -arrow_end], inferred_data=True,
                   line_width=3., arrow_size=6., arrow_type='angle_90',
                   antialias=True, cmap='inferno')
s_obj2 = SourceObj('s2', -xyz[sl_1, :], color='red', radius_min=10)


"""Finally, the last vector object is defined using the vertices and the
normals of the right hemisphere of the brain.
"""
b_obj = BrainObj('B2', hemisphere='right')  # Define the brain object
n = len(b_obj)                              # Get the number of vertices

dtype = [('vertices', float, 3), ('normals', float, 3)]  # Arrows dtype
arrows = np.zeros(n, dtype=dtype)                        # Empty arrows array
arrows['vertices'] = b_obj.vertices                      # Set the vertices
arrows['normals'] = b_obj.normals                        # Set the normals

# For the data, we use the distance between 0 and each vertex
data = np.linalg.norm(b_obj.vertices, axis=1)
# We only select vectors with a distance in [60., 60.2]
select = np.logical_and(data >= 60., data <= 60.2)

v_obj3 = VectorObj('v3', arrows, data=data, select=select, line_width=2.,
                   arrow_size=7., arrow_type='inhibitor_round', antialias=True,
                   cmap='Spectral_r', vmin=60.05, under='gray')
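# --- Hedged display sketch (added; not part of the original example) ---
# One way to look at the last vector object: combine the right hemisphere and
# the inhibitor-style arrows in a single SceneObj subplot.
from visbrain.objects import SceneObj

sc = SceneObj(bgcolor='white', size=(800, 600))
sc.add_to_subplot(b_obj, use_this_cam=True, rotate='right')
sc.add_to_subplot(v_obj3)
sc.preview()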
Example #15
# Generate random data and random connectivity
data = np.random.uniform(low=-1., high=1., size=(n_sources,))
conn = np.triu(np.random.uniform(-1., 1., (n_sources, n_sources)))
conn_select = (-.005 < conn) & (conn < .005)

# Scene creation
sc = SceneObj()


###############################################################################
# Animate a single brain object
###############################################################################
# Here we set an animation for a single brain object.

b_obj_1 = BrainObj('inflated', translucent=False)
b_obj_1.animate()
sc.add_to_subplot(b_obj_1, rotate='left', title='Animate a single object')

###############################################################################
# Animate multiple objects
###############################################################################
# Here we animate multiple objects inside a subplot

s_obj_1 = SourceObj('s1', xyz, data=data)
b_obj_2 = BrainObj('white')
b_obj_2.animate()

sc.add_to_subplot(s_obj_1, row=1, title='Animate multiple objects')
sc.add_to_subplot(b_obj_2, row=1, rotate='right', use_this_cam=True)
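###############################################################################
# Hedged sketch (added; not part of the original example): open the scene to
# play the animations. Exporting with ``record_animation`` is assumed to be
# available on SceneObj in recent visbrain versions.

# sc.record_animation('animate_example.gif', n_pic=10)
sc.preview()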
Example #16
"""
Display conjunction map
=======================

Display a conjunction map from a nii.gz file (NiBabel required).

See the original PySurfer example :

https://pysurfer.github.io/auto_examples/plot_fmri_conjunction.html#sphx-glr-auto-examples-plot-fmri-conjunction-py

.. image:: ../../_static/examples/ex_eegmeg_conjunction_map.png
"""
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file

"""Download files if needed
"""
file_1 = download_file('lh.sig.nii.gz', astype='example_data')
file_2 = download_file('lh.alt_sig.nii.gz', astype='example_data')

b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file_1, clim=(4., 30.), hide_under=4, cmap='Reds_r',
                     hemisphere='left')
b_obj.add_activation(file=file_2, clim=(4., 30.), hide_under=4, cmap='Blues_r',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
Example #17
.. image:: ../../picture/picbrain/ex_brain_control.png
"""
from visbrain import Brain
from visbrain.objects import BrainObj

"""Visbrain comes with three default templates :
* B1 (with cerebellum)
* B2
* B3

Three templates can also be downloaded :
* inflated (inflated brain of PySurfer)
* white
* sphere
"""
b_obj = BrainObj('B3')  # 'B1', 'B2', 'inflated', 'sphere', 'white'

"""By default, the brain is translucent but it can be turned to opaque
"""
# b_obj = BrainObj('B3', translucent=False)

"""You can also select a specific hemisphere
"""
# b_obj = BrainObj('B3', translucent=False, hemisphere='left')  # 'right'

"""For the inflated, white and translucent templates, sulcus can be also used
"""
# b_obj = BrainObj('inflated', translucent=False, hemisphere='right',
#                  sulcus=True)

"""Once the brain object created, pass it to the graphical user interface.
Example #18
 def test_supported_format(self):
     """Test for input formats."""
     for k in ['X3D_FILE', 'GII_FILE', 'OBJ_FILE']:
         file = self.need_file(NEEDED_FILES[k])
         BrainObj(file)
Example #19
    def __init__(self, canvas, **kwargs):
        """Init."""
        # Create a root node :
        self._vbNode = scene.Node(name='Brain')
        self._vbNode.transform = vist.STTransform(scale=[self._gl_scale] * 3)
        logger.debug("Brain rescaled " + str([self._gl_scale] * 3))
        PROFILER("Root node", level=1)

        # ========================= SOURCES =========================
        self.sources = CombineSources(kwargs.get('source_obj', None))
        if self.sources.name is None:
            self._obj_type_lst.model().item(4).setEnabled(False)
            # Disable menu :
            self.menuDispSources.setChecked(False)
            self.menuTransform.setEnabled(False)
        self.sources.parent = self._vbNode
        PROFILER("Sources object", level=1)

        # ========================= CONNECTIVITY =========================
        self.connect = CombineConnect(kwargs.get('connect_obj', None))
        if self.connect.name is None:
            self._obj_type_lst.model().item(5).setEnabled(False)
            self.menuDispConnect.setEnabled(False)
        self.connect.parent = self._vbNode
        PROFILER("Connect object", level=1)

        # ========================= TIME-SERIES =========================
        self.tseries = CombineTimeSeries(kwargs.get('time_series_obj', None))
        if self.tseries.name is None:
            self._obj_type_lst.model().item(6).setEnabled(False)
        self.tseries.parent = self._vbNode
        PROFILER("Time-series object", level=1)

        # ========================= PICTURES =========================
        self.pic = CombinePictures(kwargs.get('picture_obj', None))
        if self.pic.name is None:
            self._obj_type_lst.model().item(7).setEnabled(False)
        self.pic.parent = self._vbNode
        PROFILER("Pictures object", level=1)

        # ========================= VECTORS =========================
        self.vectors = CombineVectors(kwargs.get('vector_obj', None))
        if self.vectors.name is None:
            self._obj_type_lst.model().item(8).setEnabled(False)
        self.vectors.parent = self._vbNode
        PROFILER("Vectors object", level=1)

        # ========================= VOLUME =========================
        # ----------------- Volume -----------------
        if kwargs.get('vol_obj', None) is None:
            self.volume = VolumeObj('brodmann')
            self.volume.visible_obj = False
        else:
            self.volume = kwargs.get('vol_obj')
        if self.volume.name not in self.volume.list():
            self.volume.save(tmpfile=True)
        self.volume.parent = self._vbNode
        PROFILER("Volume object", level=1)
        # ----------------- ROI -----------------
        if kwargs.get('roi_obj', None) is None:
            self.roi = RoiObj('brodmann')
            self.roi.visible_obj = False
        else:
            self.roi = kwargs.get('roi_obj')
        if self.roi.name not in self.roi.list():
            self.roi.save(tmpfile=True)
        self.roi.parent = self._vbNode
        PROFILER("ROI object", level=1)
        # ----------------- Cross-sections -----------------
        if kwargs.get('cross_sec_obj', None) is None:
            self.cross_sec = CrossSecObj('brodmann')
        else:
            self.cross_sec = kwargs.get('cross_sec_obj')
        if self.cross_sec.name not in self.cross_sec.list():
            self.cross_sec.save(tmpfile=True)
        self.cross_sec.visible_obj = False
        self.cross_sec.text_size = 2.
        self.cross_sec.parent = self._csView.wc.scene
        self._csView.camera = self.cross_sec._get_camera()
        self.cross_sec.set_shortcuts_to_canvas(self._csView)
        PROFILER("Cross-sections object", level=1)

        # ========================= BRAIN =========================
        if kwargs.get('brain_obj', None) is None:
            self.atlas = BrainObj('B1')
        else:
            self.atlas = kwargs['brain_obj']
        if self.atlas.name not in self.atlas.list():
            self.atlas.save(tmpfile=True)
        self.atlas.scale = self._gl_scale
        self.atlas.parent = self._vbNode
        PROFILER("Brain object", level=1)
Example #20
###############################################################################
# .. note::
#     Here, we used s_obj.project_sources(b_obj) to project the sources'
#     activity on the surface. We could also have used b_obj.project_sources(s_obj)

###############################################################################
# Parcellize the brain
###############################################################################
# Here, we parcellize the brain (using all the parcels included in the file).
# Note that these parcellation files come from MNE-Python.

# Download the annotation file of the left hemisphere lh.aparc.a2009s.annot
path_to_file1 = download_file('lh.aparc.a2009s.annot', astype='example_data')
# Define the brain object (now you should know how to do it)
b_obj_parl = BrainObj('inflated', hemisphere='left', translucent=False)
# Print parcellates included in the file
# print(b_obj_parl.get_parcellates(path_to_file1))
# Finally, parcellize the brain and add the brain to the scene
b_obj_parl.parcellize(path_to_file1)
sc.add_to_subplot(b_obj_parl, row=1, col=1, rotate='left',
                  title='Parcellize using the Desikan Atlas', **KW)

###############################################################################
# .. note::
#     Those annotation files from MNE-Python are only compatible with the
#     inflated, white and sphere templates

###############################################################################
# Send data to parcellates
###############################################################################
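# Hedged sketch (added; not part of the original example): it illustrates the
# step announced above by attaching one value per selected parcel. The label
# names come from the Destrieux atlas and, like the subplot position, are
# illustrative only.
b_obj_data = BrainObj('inflated', hemisphere='left', translucent=False)
select_data = ['G_precentral', 'G_postcentral', 'S_central']
data = [1., 2., 3.]
b_obj_data.parcellize(path_to_file1, select=select_data, data=data,
                      cmap='Spectral_r')
sc.add_to_subplot(b_obj_data, row=1, col=2, rotate='left',
                  title='Send data to parcellates', **KW)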
Example #21
 def test_remove(self):
     """Test function remove."""
     BrainObj('Custom').remove()
     clean_tmp()
Example #22
    s_obj_11 = SourceObj('iEEG', xyz11, data=data11, cmap=cmap)
    s_obj_11.color_sources(data=data11)
    s_obj_12 = SourceObj('iEEG', xyz12, data=data12, cmap=cmap)
    s_obj_12.color_sources(data=data12)
    s_obj_13 = SourceObj('iEEG', xyz13, data=data13, cmap=cmap)
    s_obj_13.color_sources(data=data13)
    s_obj_14 = SourceObj('iEEG', xyz14, data=data14, cmap=cmap)
    s_obj_14.color_sources(data=data14)
    s_obj_15 = SourceObj('iEEG', xyz15, data=data15, cmap=cmap)
    s_obj_15.color_sources(data=data15)

    s_obj_all = s_obj_1 + s_obj_2 + s_obj_3 + s_obj_4 + s_obj_5 + s_obj_6 + s_obj_7 + s_obj_8 + s_obj_9 + s_obj_10 + \
                s_obj_11 + s_obj_12 + s_obj_13 + s_obj_14 + s_obj_15

    b_obj_proj_left = BrainObj(template_brain,
                               hemisphere='left',
                               translucent=False)
    b_obj_proj_left.project_sources(s_obj_all, clim=(0, 16), cmap=cmap)
    sc.add_to_subplot(b_obj_proj_left,
                      row=0,
                      col=0,
                      rotate='left',
                      use_this_cam=True)

    b_obj_proj_left = BrainObj(template_brain,
                               hemisphere='left',
                               translucent=False)
    b_obj_proj_left.project_sources(s_obj_all, clim=(0, 16), cmap=cmap)
    sc.add_to_subplot(b_obj_proj_left,
                      row=0,
                      col=1,
Example #23
#     projection is a good way to plot results across subjects. To illustrate
#     this feature, we provide a set of intracranial MNI coordinates.

# Download iEEG coordinates and define some random data
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
data = np.random.rand(xyz.shape[0])

###############################################################################
# Basic brain using MNI template
###############################################################################
# By default, Visbrain includes several MNI brain templates (B1, B2, B3,
# inflated, white and sphere).

# Translucent inflated BrainObj with both hemispheres displayed
b_obj_fs = BrainObj('inflated', translucent=True, hemisphere='both')
# Add the brain to the scene. Note that `row_span` means that the plot will
# occupy two rows (row 0 and 1)
sc.add_to_subplot(b_obj_fs,
                  row=0,
                  col=0,
                  row_span=2,
                  title='Translucent inflated brain template',
                  **KW)

###############################################################################
# Select the left or the right hemisphere
###############################################################################
# You can use the `hemisphere` input to select either the 'left', 'right' or
# 'both' hemispheres.
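# Hedged sketch (added; not part of the original example): the subplot
# positions and rotations below are illustrative only.
b_obj_left = BrainObj('white', hemisphere='left', translucent=False)
sc.add_to_subplot(b_obj_left, row=0, col=1, rotate='right',
                  title='Left hemisphere only', **KW)
b_obj_right = BrainObj('white', hemisphere='right', translucent=False)
sc.add_to_subplot(b_obj_right, row=1, col=1, rotate='left',
                  title='Right hemisphere only', **KW)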
Example #24
"""
Display fMRI activation
=======================

Display fMRI activations from a nii.gz file (NiBabel required).

See the original example :

https://pysurfer.github.io/auto_examples/plot_fmri_activation.html#sphx-glr-auto-examples-plot-fmri-activation-py

.. image:: ../../_static/examples/ex_eegmeg_fmri_activations.png
"""
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file
"""Download file if needed
"""
file = download_file('lh.sig.nii.gz', astype='example_data')

b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file,
                     clim=(5., 20.),
                     hide_under=5,
                     cmap='viridis',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
Example #25
# If you want all connections to be the same color, use custom_colors = {None: "green"}
# Then, we define the sources
# Node size and color
# s_obj = SourceObj('sources', nodes, radius_min=5., color="red")
# black color nodes = color='#000000'
#title
# sc.add_to_subplot(c_default, row=0, col=0, zoom=0.1)
# sc.add_to_subplot(c_default1, row=0, col=0, zoom=0.1)
# sc.add_to_subplot(c_default2, row=0, col=0, zoom=0.1)
# sc.add_to_subplot(c_default3, row=0, col=0, zoom=0.1)
# sc.add_to_subplot(c_default4, row=0, col=0, zoom=0.1)

# And add connect, source and brain objects to the scene
# sc.add_to_subplot(s_obj, row=0, col=0, zoom=0.1)
b_obj = BrainObj('B2')
sc.add_to_subplot(b_obj, row=0, col=0, use_this_cam=True)
#, use_this_cam=True
# from visbrain.objects import ColorbarObj
# cb = ColorbarObj(c_default, **CBAR_STATE)
# sc.add_to_subplot(cb, width_max=200, row=0, col=1)

  # clim=(4., 78.2), vmin=10.,
  #                 vmax=72., cblabel='Colorbar title', under='gray',
  #                 over='red', txtcolor='black', cbtxtsz=40, cbtxtsh=2.,
  #                 txtsz=20., width=.04)

sc.screenshot('plain_brain.png', transparent=True)
sc.preview()
#sc.screenshot("test.jpg")
Example #26
https://pysurfer.github.io/auto_examples/plot_meg_inverse_solution.html#sphx-glr-auto-examples-plot-meg-inverse-solution-py

.. image:: ../../_static/examples/ex_eegmeg_meg_inverse.png
"""
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file, read_stc

"""Download file if needed :
"""
stc_file = download_file('meg_source_estimate-lh.stc', astype='example_data')

# Read the *.stc file :
file = read_stc(stc_file)

# Get the data and vertices from the file :
data = file['data'][:, 2]
vertices = file['vertices']

# Define a brain object and add the data to the mesh :
b_obj = BrainObj('inflated', translucent=False, hemisphere='left')
b_obj.add_activation(data=data, vertices=vertices, smoothing_steps=15,
                     clim=(13., 22.), hide_under=13., cmap='plasma',
                     hemisphere='left')

# Finally, pass the brain object to the Brain module :
vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
Example #27
 def test_save(self):
     """Test function save."""
     b_cust = BrainObj('Custom', vertices=vertices, faces=faces)
     b_cust.save()
     b_cust_tmp = BrainObj('CustomTmp', vertices=vertices, faces=faces)
     b_cust_tmp.save(tmpfile=True)
Example #28
"""Create the source object. If you want to previsualize the result without
opening Brain, use s_obj.preview()
"""
s_obj = SourceObj('SourceExample', xyz, **kwargs)
# s_obj.preview()
"""Color sources according to the data
"""
# s_obj.color_sources(data=kwargs['data'], cmap='viridis')
"""Colorbar properties
"""
cb_kw = dict(
    cblabel="Project source activity",
    cbtxtsz=3.,
    border=False,
)
"""Define a brain object with the B3 template and project source's activity
onto the surface
"""
b_obj = BrainObj('B3', **cb_kw)
b_obj.project_sources(s_obj,
                      cmap='viridis',
                      vmin=50.,
                      under='orange',
                      vmax=550.,
                      over='darkred')
"""Create a Brain instance and pass both of the brain and source object defined
After the interface is opened, press C to display the colorbar.
"""
vb = Brain(source_obj=s_obj, brain_obj=b_obj)
vb.show()
Example #29
 def test_reload_saved_template(self):
     """Test function reload_saved_template."""
     BrainObj('Custom')
     BrainObj('CustomTmp')
Example #30
sc = SceneObj(size=(1000, 1000), bgcolor=(1, 1, 1))

views = ["left", 'top']

for i_v, view in enumerate(views):
    for nf, freq_band_name in enumerate(freq_band_names):
        res_path = op.join(
            data_path, graph_analysis_name,
            "graph_den_pipe_den_" + str(con_den).replace(".", "_"),
            "_freq_band_name_" + freq_band_name + "_subject_id_sub-0003")

        lol_file = op.join(res_path, "community_rada", "Z_List.lol")
        net_file = op.join(res_path, "prep_rada", "Z_List.net")

        b_obj = BrainObj("B1", translucent=True)
        sc.add_to_subplot(b_obj,
                          row=nf,
                          col=i_v,
                          use_this_cam=True,
                          rotate=view,
                          title=("Modules for {} band".format(freq_band_name)),
                          title_size=14,
                          title_bold=True,
                          title_color='black')

        c_obj, s_obj = visu_graph_modules(lol_file=lol_file,
                                          net_file=net_file,
                                          coords_file=coords_file,
                                          inter_modules=False)
Example #31
 def test_definition(self):
     """Test function definition."""
     BrainObj('inflated', sulcus=True)
     # Test default templates :
     for k, i in zip(['B1', 'B2', 'B3'], ['left', 'both', 'right']):
         b_obj.set_data(name=k, hemisphere=i)
Example #32
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file

file1 = 'lh.aparc.a2009s.annot'
file2 = 'rh.aparc.annot'

# Download files if needed :
path_to_file1 = download_file(file1, astype='example_data')
path_to_file2 = download_file(file2, astype='example_data')

# Define a brain object :
b_obj = BrainObj('inflated',
                 hemisphere='both',
                 translucent=False,
                 cblabel='Parcellates example',
                 cbtxtsz=4.)
"""Parcellize the left hemisphere using the Destrieux Atlas. By default, no
parcellates are selected
"""
b_obj.parcellize(path_to_file1, hemisphere='left')
"""If you want to get the list of all predefined parcellates, use the
`get_parcellates` method which returns a pandas DataFrame with the index, the
name and the color associated with each parcel
"""
df = b_obj.get_parcellates(path_to_file2)
# print(df)
"""Select only some parcellates. Note that this parcellization is using an
other atlas (Desikan-Killiany atlas)
"""
Example #33
 def test_custom_templates(self):
     """Test passing vertices, faces and normals."""
     BrainObj('Custom', vertices=vertices, faces=faces)
     BrainObj('Custom', vertices=vertices, faces=faces, normals=normals)
Example #34
s_obj_2.color_sources(data=data2)
s_obj_3 = SourceObj('iEEG', xyz3, data=data3, cmap=cmap)
s_obj_3.color_sources(data=data3)
s_obj_4 = SourceObj('iEEG', xyz4, data=data4, cmap=cmap)
s_obj_4.color_sources(data=data4)
s_obj_5 = SourceObj('iEEG', xyz5, data=data5, cmap=cmap)
s_obj_5.color_sources(data=data5)
s_obj_6 = SourceObj('iEEG', xyz6, data=data6, cmap=cmap)
s_obj_6.color_sources(data=data6)

#s_obj_all = s_obj_1 + s_obj_2 + s_obj_3 + s_obj_4+ s_obj_5 + s_obj_6 + s_obj_7

s_obj_all = s_obj_6 + s_obj_5 + s_obj_4 + s_obj_3 + s_obj_2 + s_obj_1

b_obj_proj_left = BrainObj(template_brain,
                           hemisphere='left',
                           translucent=False)
b_obj_proj_left.project_sources(s_obj_all, clim=(1, 7), cmap='viridis_spliced')
sc.add_to_subplot(b_obj_proj_left,
                  row=0,
                  col=0,
                  rotate='left',
                  use_this_cam=True)

b_obj_proj_left = BrainObj(template_brain,
                           hemisphere='left',
                           translucent=False)
b_obj_proj_left.project_sources(s_obj_all, clim=(1, 7), cmap='viridis_spliced')
sc.add_to_subplot(b_obj_proj_left,
                  row=1,
                  col=0,
Example #35
 def test_overlay_from_file(self):
     """Test add_activation method."""
     # Prepare the brain :
     b_obj = BrainObj('inflated')
     file_1 = self.need_file(NEEDED_FILES['OVERLAY_1'])
     file_2 = self.need_file(NEEDED_FILES['OVERLAY_2'])
     # NIFTI Overlay :
     b_obj.add_activation(file=file_1,
                          clim=(4., 30.),
                          hide_under=4,
                          cmap='Reds_r',
                          hemisphere='left')
     b_obj.add_activation(file=file_2,
                          clim=(4., 30.),
                          hide_under=4,
                          cmap='Blues_r',
                          hemisphere='left',
                          n_contours=10)
     # Meg inverse :
     file_3 = read_stc(self.need_file(NEEDED_FILES['MEG_INVERSE']))
     data = file_3['data'][:, 2]
     vertices = file_3['vertices']
     b_obj.add_activation(data=data, vertices=vertices, smoothing_steps=3)
     b_obj.add_activation(data=data,
                          vertices=vertices,
                          smoothing_steps=5,
                          clim=(13., 22.),
                          hide_under=13.,
                          cmap='plasma')
     # GII overlays :
     gii = self.need_file(NEEDED_FILES['GII_FILE'])
     gii_overlay = self.need_file(NEEDED_FILES['GII_OVERLAY'])
     b_gii = BrainObj(gii)
     b_gii.add_activation(file=gii_overlay)
Example #36
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file


file1 = 'lh.aparc.a2009s.annot'
file2 = 'rh.aparc.annot'

# Download files if needed :
path_to_file1 = download_file(file1, astype='example_data')
path_to_file2 = download_file(file2, astype='example_data')

# Define a brain object :
b_obj = BrainObj('inflated', hemisphere='both', translucent=False,
                 cblabel='Parcellates example', cbtxtsz=4.)

"""Parcellize the left hemisphere using the Destrieux Atlas. By default, no
parcellates are selected
"""
b_obj.parcellize(path_to_file1, hemisphere='left')

"""If you want to get the list of all predefined parcellates, use the
`get_parcellates` method which returns a pandas DataFrame with the index, the
name and the color associated with each parcel
"""
df = b_obj.get_parcellates(path_to_file2)
# print(df)

"""Select only some parcellates. Note that this parcellization is using an
other atlas (Desikan-Killiany atlas)
Example #37
    def parcellize_brain(self,
                         path_to_file1=None,
                         path_to_file2=None,
                         cmap="videen_style"):
        # Here, we parcellize the brain (using all the parcels included in the file).
        # Note that these parcellation files come from MNE-Python.

        # Download the annotation file of the left hemisphere (lh.aparc.annot)
        if path_to_file1 is None:
            path_to_file1 = download_file('lh.aparc.annot',
                                          astype='example_data')
        # Define the brain object (now you should know how to do it)
        b_obj_parl = BrainObj('inflated', hemisphere='left', translucent=False)
        # From the list of printed parcellates, we only select a few of them
        select_par = [
            b
            for b in b_obj_parl.get_parcellates(path_to_file1)['Labels'].values
            if b not in
            ["unknown", "corpuscallosum", "FreeSurfer_Defined_Medial_Wall"]
        ]
        print("Selected parcelations:", select_par)
        # Now we define some data for each parcellates (one value per pacellate)
        #data_par = self.data[0:34]
        data_par = self.data[0:7]

        # Finally, parcellize the brain and add the brain to the scene
        b_obj_parl.parcellize(
            path_to_file1,
            select=select_par,
            hemisphere='left',
            cmap=cmap,
            data=data_par,
            clim=[self.min_ji, self.max_ji],
            #cmap='videen_style', data=data_par, clim=[self.min_ji, self.max_ji],
            vmin=self.min_ji,
            vmax=self.max_ji,
            under='lightgray',
            over='darkred')
        self.sc.add_to_subplot(b_obj_parl,
                               row=0,
                               col=0,
                               col_span=3,
                               rotate='left',
                               title='Left Hemisphere',
                               **self.KW)

        # Again, we download an annotation file, but this time for the right hemisphere

        # Download the annotation file of the right hemisphere rh.aparc.annot
        if path_to_file2 is None:
            path_to_file2 = download_file('rh.aparc.annot',
                                          astype='example_data')
        # Define the brain object (again... I know, this is redundant)
        b_obj_parr = BrainObj('inflated',
                              hemisphere='right',
                              translucent=False)
        print(b_obj_parr)

        select_par = [
            b
            for b in b_obj_parr.get_parcellates(path_to_file2)['Labels'].values
            if b not in
            ["unknown", "corpuscallosum", "FreeSurfer_Defined_Medial_Wall"]
        ]
        print("Selected parcelations:", select_par)
        #data_par = self.data[49:-1]
        data_par = self.data[7:]

        b_obj_parr.parcellize(
            path_to_file2,
            select=select_par,
            hemisphere='right',
            cmap=cmap,
            data=data_par,
            clim=[self.min_ji, self.max_ji],
            #cmap='videen_style', data=data_par, clim=[self.min_ji, self.max_ji],
            vmin=self.min_ji,
            vmax=self.max_ji,
            under='lightgray',
            over='darkred')

        # Add the brain object to the scene
        self.sc.add_to_subplot(b_obj_parr,
                               row=0,
                               col=4,
                               col_span=3,
                               rotate='right',
                               title='Right Hemisphere',
                               **self.KW)
        # Get the colorbar of the brain object and add it to the scene
        cb_parr = ColorbarObj(b_obj_parl,
                              cblabel='Feedback Inhibitory Synaptic Coupling',
                              **self.CBAR_STATE)
        #self.sc.add_to_subplot(cb_parr, row=0, col=3, width_max=2000)
        self.b_obj_parl = b_obj_parl
        self.path_to_file1 = path_to_file1
        self.b_obj_parr = b_obj_parr
        self.path_to_file2 = path_to_file2
Example #38
from visbrain.io import read_stc, clean_tmp

NEEDED_FILES = dict(ANNOT_FILE_1='lh.aparc.annot',
                    ANNOT_FILE_2='rh.aparc.annot',
                    MEG_INVERSE='meg_source_estimate-lh.stc',
                    OVERLAY_1='lh.sig.nii.gz',
                    OVERLAY_2='lh.alt_sig.nii.gz',
                    PARCELLATES_1='lh.aparc.a2009s.annot',
                    PARCELLATES_2='rh.aparc.annot',
                    X3D_FILE='ferret.x3d',
                    GII_FILE='lh.bert.inflated.gii',
                    GII_OVERLAY='lh.bert.thickness.gii',
                    OBJ_FILE='brain.obj')

# BRAIN :
b_obj = BrainObj('B1')
n_vertices, n_faces = 100, 50
vertices_x3 = 20. * np.random.rand(n_vertices, 3, 3)
vertices = 20. * np.random.rand(n_vertices, 3)
normals = (vertices >= 0).astype(float)
faces = np.random.randint(0, n_vertices, (n_faces, 3))

# SOURCES :
xyz = np.random.uniform(-20, 20, (50, 3))
mask = xyz[:, 0] > 10
s_obj = SourceObj('xyz', xyz, mask=mask)


class TestBrainObj(_TestObjects):
    """Test BrainObj."""
Example #39
# Scene creation
###############################################################################

CAM_STATE = dict(azimuth=0,        # azimuth angle
                 elevation=90,     # elevation angle
                 )
CBAR_STATE = dict(cbtxtsz=12, txtsz=10., width=.1, cbtxtsh=3.,
                  rect=(-.3, -2., 1., 4.))
sc = SceneObj(camera_state=CAM_STATE, size=(1400, 1000))

###############################################################################
# fMRI activation
###############################################################################

file = download_file('lh.sig.nii.gz', astype='example_data')
b_obj_fmri = BrainObj('inflated', translucent=False, sulcus=True)
b_obj_fmri.add_activation(file=file, clim=(5., 20.), hide_under=5,
                          cmap='viridis', hemisphere='left')
sc.add_to_subplot(b_obj_fmri, row=0, col=0, row_span=2,
                  title='fMRI activation', rotate='top')

###############################################################################
# Region Of Interest (ROI)
###############################################################################

roi_aal = RoiObj('aal')
roi_aal.select_roi(select=[29, 30], unique_color=True, smooth=11)
sc.add_to_subplot(roi_aal, row=0, col=1, title='Region Of Interest (ROI)')
sc.add_to_subplot(BrainObj('B1'), use_this_cam=True, row=0, col=1)

###############################################################################
Example #40
from visbrain.objects import BrainObj, VectorObj
from visbrain.io import read_stc, download_file

"""Download file if needed
"""
stc_file = download_file('meg_source_estimate-lh.stc', astype='example_data')

# Read the *.stc file :
file = read_stc(stc_file)

# Get the data and vertices from the file :
data = file['data'][:, 2]
vertices = file['vertices']

# Define a brain object and add the data to the mesh :
b_obj = BrainObj('white', translucent=True, hemisphere='left')
b_obj.add_activation(data=data, vertices=vertices, smoothing_steps=5,
                     clim=(7., 21.), hide_under=7., cmap='viridis')

# Build arrows :
dt = np.dtype([('vertices', float, 3), ('normals', float, 3)])
arrows = np.zeros(len(data), dtype=dt)
arrows['vertices'] = b_obj.vertices[vertices, :]
arrows['normals'] = b_obj.normals[vertices, :]
select = data >= 7.

# Define the vector object :
v_obj = VectorObj('vector', arrows, data=data, inferred_data=True,
                  clim=(7., 21.), antialias=True, cmap='viridis',
                  select=select, line_width=2., arrow_coef=1.2,
                  dynamic=(.2, 1.))