コード例 #1
0
ファイル: test_mneio.py プロジェクト: EtienneCmb/visbrain
 def test_mne_switch(self):
     """Test function mne_switch.

     Downloads the example EDF recording if it is missing, then loads it
     through ``mne_switch`` with a 100 Hz frequency argument and a set of
     excluded channels.
     """
     # Download sleep file :
     sleep_file = path_to_visbrain_data('excerpt2.edf', 'example_data')
     # Split into (path-without-extension, extension) as mne_switch expects.
     file, ext = os.path.splitext(sleep_file)
     if not os.path.isfile(sleep_file):
         download_file('sleep_edf.zip', unzip=True, astype='example_data')
     # Channels to skip when reading the EDF file.
     to_exclude = ['VAB', 'NAF2P-A1', 'PCPAP', 'POS', 'FP2-A1', 'O2-A1',
                   'CZ2-A1', 'event_pneumo', 'event_pneumo_aut']
     kwargs = dict(exclude=to_exclude, stim_channel=False)
     mne_switch(file, ext, 100., preload=True, **kwargs)
コード例 #2
0
 def test_mne_switch(self):
     """Test function mne_switch.

     Variant that passes the file basename (without extension) and the
     extension as two separate arguments.
     """
     # Download sleep file :
     sleep_file = path_to_visbrain_data('excerpt2.edf')
     if not os.path.isfile(sleep_file):
         download_file('sleep_edf.zip', unzip=True)
     # Channels to skip when reading the EDF file.
     to_exclude = ['VAB', 'NAF2P-A1', 'PCPAP', 'POS', 'FP2-A1', 'O2-A1',
                   'CZ2-A1', 'event_pneumo', 'event_pneumo_aut']
     kwargs = dict(exclude=to_exclude, stim_channel=False)
     mne_switch(path_to_visbrain_data('excerpt2'), '.edf', 100.,
                preload=True, **kwargs)
コード例 #3
0
ファイル: videos.py プロジェクト: BIAPT/NeuroAlgo
def create_graph_video(data, output_path, hemisphere, rotation):
    """Render a sequence of graph pictures and assemble them into a video.

    Parameters
    ----------
    data : array_like
        Graph data forwarded to ``create_graph_picture``.
        NOTE(review): the original code immediately overwrote this argument
        with the downloaded ``xyz_sample`` coordinates; that behaviour is
        kept — confirm whether the caller's ``data`` should be used instead.
    output_path : str
        Destination path of the generated ``.avi`` file (bug fix: the
        original ignored this parameter and wrote to './test.avi').
    hemisphere, rotation :
        Display options forwarded to ``create_graph_picture``.
    """
    import os

    width = 534
    height = 748
    FPS = 1
    seconds = 5

    # Here we 'get the data' (this overwrites the `data` argument, see
    # the docstring note above).
    mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
    data = mat['xyz']

    # One frame per second of video (previously hard-coded to 5).
    num_frames = FPS * seconds
    filename = 'temp.jpg'
    fourcc = VideoWriter_fourcc(*'MP42')
    video = VideoWriter(output_path, fourcc, float(FPS), (width, height))
    try:
        for i in range(num_frames):
            print(i)
            # Each iteration re-creates a temporary image used as the frame.
            create_graph_picture(filename, data, hemisphere, rotation)
            frame = imread(filename)
            print(frame.shape)

            video.write(frame)
    finally:
        # Always release the writer and remove the temporary frame image,
        # even if frame creation/reading fails part-way through.
        video.release()
        if os.path.isfile(filename):
            os.remove(filename)
コード例 #4
0
ファイル: test_roi_obj.py プロジェクト: EtienneCmb/visbrain
from visbrain.objects.tests._testing_objects import _TestVolumeObject
from visbrain.objects import SourceObj, RoiObj
from visbrain.io import (download_file, path_to_visbrain_data, read_nifti,
                         clean_tmp)


# ROI object restricted to two Brodmann areas.
roi_obj = RoiObj('brodmann')
roi_obj.select_roi([4, 6])
# Reproducible random source coordinates, recentered on the x/y axes.
rnd = np.random.RandomState(0)
xyz = 100. * rnd.rand(50, 3)
xyz[:, 0] -= 50.
xyz[:, 1] -= 50.
s_obj = SourceObj('S1', xyz)

# Fetch the MIST atlas (NIfTI volume + semicolon-separated label table).
download_file('MIST_ROI.zip', unzip=True, astype='example_data')
nifti_file = path_to_visbrain_data('MIST_ROI.nii.gz', 'example_data')
csv_file = path_to_visbrain_data('MIST_ROI.csv', 'example_data')
# Read the .csv file :
arr = np.genfromtxt(csv_file, delimiter=';', dtype=str)
# Get column names, labels and index :
column_names = arr[0, :]
# Drop the header row before extracting the data columns.
arr = np.delete(arr, 0, 0)
n_roi = arr.shape[0]
# Column 0: ROI index; columns 1-2: presumably short label and full name
# (TODO confirm against the MIST documentation).
roi_index = arr[:, 0].astype(int)
roi_labels = arr[:, [1, 2]].astype(object)
# Build the struct array :
label = np.zeros(n_roi, dtype=[('label', object), ('name', object)])
label['label'] = roi_labels[:, 0]
label['name'] = roi_labels[:, 1]
# Get the volume and the hdr transformation :
# Get the volume and the hdr transformation :
コード例 #5
0
ファイル: test_sleep.py プロジェクト: CoastSunny/visbrain
import numpy as np
from vispy.app.canvas import MouseEvent, KeyEvent
from vispy.util.keys import Key

from visbrain import Sleep
from visbrain.io import download_file, path_to_visbrain_data
from visbrain.tests._tests_visbrain import _TestVisbrain

# File to load :
sleep_file = path_to_visbrain_data('excerpt2.edf')
hypno_file = path_to_visbrain_data('Hypnogram_excerpt2.txt')

# Download sleep file :
# NOTE(review): `os` is used below but no `import os` is visible in this
# excerpt — confirm it is imported at the top of the file.
if not os.path.isfile(sleep_file):
    download_file('sleep_edf.zip', unzip=True)
# Annotation onsets passed to the GUI (units depend on Sleep's API — verify).
onset = np.array([100, 2000, 5000])

# Create Sleep application :
sp = Sleep(data=sleep_file, hypno=hypno_file, axis=True, annotations=onset)


class TestSleep(_TestVisbrain):
    """Test sleep.py."""

    ###########################################################################
    #                                TOOLS
    ###########################################################################
    def test_reference_switch(self):
        """Test function reference_switch."""
        for k in [2]:  # range(3)
コード例 #6
0
This example illustrate how to define a custom brain template using your own
vertices and faces.

.. image:: ../../_static/examples/ex_add_brain_template.png
"""
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file

"""Download and the load the Custom.npz archive. This file contains vertices
and faces of a brain template that is not integrated by default in Visbrain.
"""
# `download_file` returns the local path of the (cached) archive.
mat = np.load(download_file('Custom.npz', astype='example_data'))

"""Get vertices and faces from the archive.

In this examples, normals are also present in the archive. If you don't have
the normals, the BrainObj will compute it automatically.
"""
# Unpack the three arrays stored in the .npz archive.
vert, faces, norms = mat['vertices'], mat['faces'], mat['normals']

"""Define the brain object
"""
b_obj = BrainObj('Custom', vertices=vert, faces=faces, normals=norms)

"""Then you have two strategies :
* If you are going to use this template a lot and don't want to redefine it
  every times, use `b_obj.save()`. Once the object saved, it can be reloaded
コード例 #7
0
ファイル: PlotBrain.py プロジェクト: pedrocklein/toolbox
    def parcellize_brain(self,
                         path_to_file1=None,
                         path_to_file2=None,
                         cmap="videen_style"):
        """Parcellize both hemispheres and add them to the scene.

        Parameters
        ----------
        path_to_file1 : str | None
            Path to the left-hemisphere annotation file. When None, the
            'lh.aparc.annot' file is downloaded.
        path_to_file2 : str | None
            Path to the right-hemisphere annotation file. When None, the
            'rh.aparc.annot' file is downloaded.
        cmap : str
            Colormap used for both parcellations.
        """
        # Here, we parcellize the brain (using all parcellated included in the file).
        # Note that those parcellates files comes from MNE-python.

        # Download the annotation file of the left hemisphere lh.aparc.annot
        # (bug fix: compare to None with `is`, not `==` — PEP 8).
        if path_to_file1 is None:
            path_to_file1 = download_file('lh.aparc.annot',
                                          astype='example_data')
        # Define the brain object (now you should know how to do it)
        b_obj_parl = BrainObj('inflated', hemisphere='left', translucent=False)
        # From the list of printed parcellates, we only select a few of them,
        # dropping the non-cortical placeholder labels.
        select_par = [
            b
            for b in b_obj_parl.get_parcellates(path_to_file1)['Labels'].values
            if b not in
            ["unknown", "corpuscallosum", "FreeSurfer_Defined_Medial_Wall"]
        ]
        print("Selected parcelations:", select_par)
        # Now we define some data for each parcellates (one value per pacellate).
        # NOTE(review): the 0:7 / 7: split assumes `self.data` holds left-
        # then right-hemisphere values — confirm with the data producer.
        data_par = self.data[0:7]

        # Finally, parcellize the brain and add the brain to the scene
        b_obj_parl.parcellize(
            path_to_file1,
            select=select_par,
            hemisphere='left',
            cmap=cmap,
            data=data_par,
            clim=[self.min_ji, self.max_ji],
            vmin=self.min_ji,
            vmax=self.max_ji,
            under='lightgray',
            over='darkred')
        self.sc.add_to_subplot(b_obj_parl,
                               row=0,
                               col=0,
                               col_span=3,
                               rotate='left',
                               title='Left Hemisphere',
                               **self.KW)

        # Again, we download an annotation file, but this time for the right
        # hemisphere.

        # Download the annotation file of the right hemisphere rh.aparc.annot
        if path_to_file2 is None:
            path_to_file2 = download_file('rh.aparc.annot',
                                          astype='example_data')
        # Define the brain object (again... I know, this is redundant)
        b_obj_parr = BrainObj('inflated',
                              hemisphere='right',
                              translucent=False)
        print(b_obj_parr)

        select_par = [
            b
            for b in b_obj_parr.get_parcellates(path_to_file2)['Labels'].values
            if b not in
            ["unknown", "corpuscallosum", "FreeSurfer_Defined_Medial_Wall"]
        ]
        print("Selected parcelations:", select_par)
        data_par = self.data[7:]

        b_obj_parr.parcellize(
            path_to_file2,
            select=select_par,
            hemisphere='right',
            cmap=cmap,
            data=data_par,
            clim=[self.min_ji, self.max_ji],
            vmin=self.min_ji,
            vmax=self.max_ji,
            under='lightgray',
            over='darkred')

        # Add the brain object to the scene
        self.sc.add_to_subplot(b_obj_parr,
                               row=0,
                               col=4,
                               col_span=3,
                               rotate='right',
                               title='Right Hemisphere',
                               **self.KW)
        # Get the colorbar of the brain object. NOTE(review): it is built but
        # never added to the scene (the add_to_subplot call was commented
        # out) — kept for parity with the original behaviour.
        cb_parr = ColorbarObj(b_obj_parl,
                              cblabel='Feedback Inhibitory Synaptic Coupling',
                              **self.CBAR_STATE)
        self.b_obj_parl = b_obj_parl
        self.path_to_file1 = path_to_file1
        self.b_obj_parr = b_obj_parr
        self.path_to_file2 = path_to_file2
コード例 #8
0
===================

Display topographic plots in a grid using several plotting properties.

Download topoplot data (topoplot_data.npz) :
https://www.dropbox.com/s/m76y3p0fyj6lxht/topoplot_data.npz?dl=1

.. image:: ../../picture/pictopo/ex_topoplot_plotting_properties.png
"""
import numpy as np

from visbrain import Topo
from visbrain.io import download_file, path_to_visbrain_data

# Load the data :
download_file('topoplot_data.npz')
mat = np.load(path_to_visbrain_data('topoplot_data.npz'))
xyz, data = mat['xyz'], mat['data']
# One synthetic channel name per data value.
channels = [str(k) for k in range(len(data))]

# Shared plotting properties forwarded to every topoplot.
kwargs = {'title_size': 3., 'cb_txt_size': 2, 'margin': 15 / 100,
          'chan_offset': (0., 1.1, 0.), 'chan_size': 1.5}

# Create a topoplot instance :
t = Topo()

# Topoplot with 10 regulary spaced levels :
t.add_topoplot('Topo_1', data, xyz=xyz, channels=channels,
               title='Regulary spaced levels', cmap='viridis', levels=10,
               level_colors='Spectral_r', cblabel='Beta power',
               title_color='#ab4642', **kwargs)
コード例 #9
0
See https://surfer.nmr.mgh.harvard.edu/fswiki/CorticalParcellation for files
used in this example.

.. image:: ../../_static/examples/ex_parcellates.png
"""
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file

# Left hemisphere: Destrieux atlas; right hemisphere: Desikan-Killiany atlas.
file1 = 'lh.aparc.a2009s.annot'
file2 = 'rh.aparc.annot'

# Download files if needed :
path_to_file1 = download_file(file1, astype='example_data')
path_to_file2 = download_file(file2, astype='example_data')

# Define a brain object :
b_obj = BrainObj('inflated',
                 hemisphere='both',
                 translucent=False,
                 cblabel='Parcellates example',
                 cbtxtsz=4.)
"""Parcellize the left hemisphere using the Destrieux Atlas. By default, no
parcellates are selected
"""
b_obj.parcellize(path_to_file1, hemisphere='left')
"""If you want to get the list of all predefined parcellates, use the
`get_parcellates` method which returns a pandas DataFrame with the index, the
name and the color associated to each parcellates
コード例 #10
0
"""
Get sleep statistics
====================

Get sleep statictics such as sleep stages duration, duration of the hypnogram.
"""
from visbrain.io import download_file, get_sleep_stats

###############################################################################
# Hypnogram data
###############################################################################
# Download a hypnogram example

path_to_hypno = download_file("s101_jbe.hyp", astype='example_data')

###############################################################################
# Get sleep statistics
###############################################################################
# Sleep statistics are going to be printed in the terminal and then saved in a
# `my_stats.csv`

get_sleep_stats(path_to_hypno, output_file='my_stats.csv')
コード例 #11
0
ファイル: plot_brain_obj.py プロジェクト: EtienneCmb/visbrain
# Colorbar default arguments. See `visbrain.objects.ColorbarObj`
CBAR_STATE = dict(cbtxtsz=12, txtsz=10., width=.1, cbtxtsh=3.,
                  rect=(-.3, -2., 1., 4.))
KW = dict(title_size=14., zoom=1.2)

###############################################################################
# .. note::
#     The BrainObj can interact with sources (SourceObj). For example, if the
#     source object represent intracranial data (e.g iEEG) those sources can
#     be projected on the surface of the brain. This is an important feature
#     because intracranial implantations is usually subject dependant and the
#     projection is a good way to plot results across subjects. To illustrate
#     this feature, we provide a set of intracranial MNI coordinates.

# Download iEEG coordinates and define some random data
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
# One random value per source (non-deterministic between runs).
data = np.random.rand(xyz.shape[0])


###############################################################################
# Basic brain using MNI template
###############################################################################
# By default, Visbrain include several MNI brain templates (B1, B2, B3,
# inflated, white and sphere).

# Translucent inflated BrainObj with both hemispheres displayed
b_obj_fs = BrainObj('inflated', translucent=True, hemisphere='both')
# Add the brain to the scene. Note that `row_span` means that the plot will
# occupy two rows (row 0 and 1)
sc.add_to_subplot(b_obj_fs, row=0, col=0, row_span=2,
コード例 #12
0
 def test_nii_definition(self):
     """Test function nii_definition.

     Builds a VolumeObj straight from a downloaded Nifti file path.
     """
     VolumeObj(download_file('GG-853-GM-0.7mm.nii.gz',
                             astype='example_data'))
コード例 #13
0
and alpha power for each source. We are going to display the thalamus, then
project the source's activity on it.

.. image:: ../../_static/examples/ex_region_of_interest.png
"""
from __future__ import print_function
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj, SourceObj, RoiObj
from visbrain.io import download_file

"""Download the location of sources closed to the thalamus and the power of
alpha oscillations
"""
thalamus_xyz = download_file('thalamus.txt', astype='example_data')
thalamus_data = download_file('Px.npy', astype='example_data')
s_xyz = np.loadtxt(thalamus_xyz)
s_data = np.load(thalamus_data).mean(1)

"""Create a source object
"""
s_obj = SourceObj('ThalamusSources', s_xyz, data=s_data, color='#ab4642',
                  radius_min=10., radius_max=20.)

"""Create a ROI object. The ROI object comes with three default templates :
* 'brodmann' : Brodmann areas
* 'aal' : Automatic Anatomical Labeling
* 'talairach'

You can also define your own RoiObj with your own labels.
コード例 #14
0
See : https://brainder.org/download/flair/

.. image:: ../../_static/examples/ex_crossec_and_volume.png
"""
from visbrain.gui import Brain
from visbrain.objects import CrossSecObj, VolumeObj
from visbrain.io import download_file

"""Import the volume and the associated affine transformation
"""
volume_name = 'GG-853-WM-0.7mm.nii.gz'  # 'GG-853-GM-0.7mm.nii.gz'

"""Download the file.
"""
path = download_file(volume_name, astype='example_data')

"""Define a cross-section object

Go to the Objects tab and select 'Cross-section' in the combo box. You can also
press x to display the cross-section panel.
"""
cs_obj = CrossSecObj(path, coords=(0., 0., 0.), cmap='gist_stern')

"""Define a volume object.

Go to the Objects tab and select 'Volume' in the combo box. You can also
press x to display the volume panel.
"""
v_obj = VolumeObj(path)
コード例 #15
0
ファイル: plot_x3d_files.py プロジェクト: EtienneCmb/visbrain
from visbrain.objects import BrainObj, SceneObj
from visbrain.io import download_file


# Scene hosting every x3d brain subplot.
sc = SceneObj(size=(1500, 800))


###############################################################################
# Ferret brain
###############################################################################
# Hutchinson EB, Schwerin SC, Radomski KL, Sadeghi N, Jenkins J, Komlosh ME,
# Irfanoglu MO, Juliano SL, Pierpaoli C (2017) "Population based MRI and DTI
# templates of the adult ferret brain and tools for voxelwise analysis"
# Neuroimage 152:575–589. [doi 10.1016/j.neuroimage.2017.03.009]

ferret_file = download_file('ferret.x3d', astype='example_data')
# A BrainObj can be built directly from an .x3d mesh file path.
b_ferret_obj = BrainObj(ferret_file, translucent=False)
sc.add_to_subplot(b_ferret_obj, title='Ferret brain (Hutchinson et al. 2017)')


###############################################################################
# Macaque brain (1)
###############################################################################
# Markov NT, Ercsey-Ravasz MM, Ribeiro Gomes AR, Lamy C, Magrou L, Vezoli J,
# Misery P, Falchier A, Quilodran R, Gariel MA, Sallet J, Gamanut R,
# Huissoud C, Clavagnier S, Giroud P, Sappey-Marinier D, Barone P, Dehay C,
# Toroczkai Z, Knoblauch K, Van Essen DC, Kennedy H (2014) "A weighted and
# directed interareal connectivity matrix for macaque cerebral cortex." Cereb
# Cortex 24(1):17-36. [doi 10.1093/cercor/bhs270]

macaque_file_1 = download_file('macaque_1.x3d', astype='example_data')
コード例 #16
0
"""
Load a BrainVision file
=======================

This example demonstrate how to load a BrainVision file.

Required dataset at :
https://www.dropbox.com/s/t2bo9ufvc3f8mbj/sleep_brainvision.zip?dl=1

.. image:: ../../_static/examples/ex_LoadBrainVision.png
"""
import os
from visbrain.gui import Sleep
from visbrain.io import download_file, path_to_visbrain_data

# ---------------------------------------------------------------------------
# Load the BrainVision dataset
# ---------------------------------------------------------------------------
# Fetch the archive and unzip it into the example-data folder :
download_file("sleep_brainvision.zip", unzip=True, astype='example_data')
data_dir = path_to_visbrain_data(folder='example_data')

# Paths to the recording, the hypnogram and the GUI configuration file :
data_file = os.path.join(data_dir, 'sub-02.vhdr')
hypno_file = os.path.join(data_dir, 'sub-02.hyp')
config_file = os.path.join(data_dir, 'sub-02_config.txt')

# Launch the Sleep graphical interface :
Sleep(data=data_file, hypno=hypno_file, config_file=config_file).show()
コード例 #17
0
 def test_set_activation(self):
     """Test function set_activation.

     Uses the module-level `cs_obj` cross-section object defined elsewhere
     in the test file.
     """
     cs_obj.set_activation(download_file('GG-853-GM-0.7mm.nii.gz',
                                         astype='example_data'))
コード例 #18
0
import numpy as np

from visbrain.objects import RoiObj, ColorbarObj, SceneObj, SourceObj, BrainObj
from visbrain.io import download_file, path_to_visbrain_data, read_nifti

###############################################################################
# Download data
###############################################################################
# In order to work, this example needs to download some data i.e coordinates
# of intracranial sources and a parcellates atlas (MIST) to illustrate how to
# define your own RoiObj

# Get the path to the ~/visbrain_data/example_data folder
vb_path = path_to_visbrain_data(folder='example_data')
# Download (x, y, z) coordinates of intracranial sources
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
# One random value in [-1, 1) per source (non-deterministic between runs).
data = np.random.uniform(low=-1., high=1., size=(xyz.shape[0], ))
# Download the MIST parcellates
download_file('MIST_ROI.zip', unzip=True, astype='example_data')

###############################################################################
# Scene creation
###############################################################################
# First, we need to create the scene that will host objects

# Scene creation with a dark background and a custom size
sc = SceneObj(size=(1400, 1000))
# In this example, we also illustrate the use of the colorbar object. Hence, we
# centralize colorbar properties inside a dictionary
CBAR_STATE = dict(cbtxtsz=12,
コード例 #19
0
 def test_save(self):
     """Test function save.

     Saves the volume both persistently and as a temporary file.
     """
     v_obj = VolumeObj(download_file('GG-853-GM-0.7mm.nii.gz',
                                     astype='example_data'))
     v_obj.save()
     v_obj.save(tmpfile=True)
コード例 #20
0
This example illustrate the feasibility to combine multiple objects in the same
scene.

fMRI activations comes from the PySurfer software
(https://github.com/nipy/PySurfer/).
"""
import numpy as np

from visbrain.objects import (BrainObj, SceneObj, SourceObj, RoiObj,
                              ConnectObj, CrossSecObj, TimeSeries3DObj,
                              Picture3DObj)
from visbrain.io import download_file
from visbrain.utils import generate_eeg

# Get the path to Visbrain data and download deep sources
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
# One random value in [-1, 1) per source (non-deterministic between runs).
data = np.random.uniform(low=-1., high=1., size=(xyz.shape[0],))

###############################################################################
# Scene creation
###############################################################################

# Default camera orientation shared by the subplots.
CAM_STATE = dict(azimuth=0,        # azimuth angle
                 elevation=90,     # elevation angle
                 )
# Colorbar defaults. See `visbrain.objects.ColorbarObj`.
CBAR_STATE = dict(cbtxtsz=12, txtsz=10., width=.1, cbtxtsh=3.,
                  rect=(-.3, -2., 1., 4.))
sc = SceneObj(camera_state=CAM_STATE, size=(1400, 1000))

###############################################################################
コード例 #21
0
Control the cross-section panel and the volume using a Nifti file. The nibabel
package should also be installed.

See : https://brainder.org/download/flair/

.. image:: ../../_static/examples/ex_crossec_and_volume.png
"""
from visbrain.gui import Brain
from visbrain.objects import CrossSecObj, VolumeObj
from visbrain.io import download_file
"""Import the volume and the associated affine transformation
"""
volume_name = 'GG-853-WM-0.7mm.nii.gz'  # 'GG-853-GM-0.7mm.nii.gz'
"""Download the file.
"""
path = download_file(volume_name, astype='example_data')
"""Define a cross-section object

Go to the Objects tab and select 'Cross-section' in the combo box. You can also
press x to display the cross-section panel.
"""
cs_obj = CrossSecObj(path, coords=(0., 0., 0.), cmap='gist_stern')
"""Define a volume object.

Go to the Objects tab and select 'Volume' in the combo box. You can also
press x to display the volume panel.
"""
v_obj = VolumeObj(path)
"""Create the GUI and pass the cross-section and the volume object
"""
vb = Brain(cross_sec_obj=cs_obj, vol_obj=v_obj)
コード例 #22
0
ファイル: 12_parcellize.py プロジェクト: EtienneCmb/visbrain
used in this example.

.. image:: ../../_static/examples/ex_parcellates.png
"""
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file


# Left hemisphere: Destrieux atlas; right hemisphere: Desikan-Killiany atlas.
file1 = 'lh.aparc.a2009s.annot'
file2 = 'rh.aparc.annot'

# Download files if needed :
path_to_file1 = download_file(file1, astype='example_data')
path_to_file2 = download_file(file2, astype='example_data')

# Define a brain object :
b_obj = BrainObj('inflated', hemisphere='both', translucent=False,
                 cblabel='Parcellates example', cbtxtsz=4.)

"""Parcellize the left hemisphere using the Destrieux Atlas. By default, no
parcellates are selected
"""
b_obj.parcellize(path_to_file1, hemisphere='left')

"""If you want to get the list of all predefined parcellates, use the
`get_parcellates` method which returns a pandas DataFrame with the index, the
name and the color associated to each parcellates
"""
コード例 #23
0
"""
Display fMRI activation
=======================

Display fMRI activations from a nii.gz file (NiBabel required).

See the original example :

https://pysurfer.github.io/auto_examples/plot_fmri_activation.html#sphx-glr-auto-examples-plot-fmri-activation-py

.. image:: ../../picture/picpysurfer/ex_pysurfer_fmri_activations.png
"""
from visbrain import Brain
from visbrain.objects import BrainObj
from visbrain.io import path_to_visbrain_data, download_file

"""Download file if needed
"""
file_name = 'lh.sig.nii.gz'
download_file(file_name)
file = path_to_visbrain_data(file=file_name)


b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file, clim=(5., 20.), hide_under=5, cmap='viridis',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
コード例 #24
0
"""
Display conjunction map
=======================

Display a conjunction map from a nii.gz file (NiBabel required).

See the original PySurfer example :

https://pysurfer.github.io/auto_examples/plot_fmri_conjunction.html#sphx-glr-auto-examples-plot-fmri-conjunction-py

.. image:: ../../_static/examples/ex_eegmeg_conjunction_map.png
"""
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file

"""Download files if needed
"""
file_1 = download_file('lh.sig.nii.gz', astype='example_data')
file_2 = download_file('lh.alt_sig.nii.gz', astype='example_data')

b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file_1, clim=(4., 30.), hide_under=4, cmap='Reds_r',
                     hemisphere='left')
b_obj.add_activation(file=file_2, clim=(4., 30.), hide_under=4, cmap='Blues_r',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
コード例 #25
0
https://www.dropbox.com/s/bj1ra95rbksukro/sleep_edf.zip?dl=1

.. image:: ../../_static/examples/ex_replace_detection_basic.png
"""
###############################################################################
# Load your file and create an instance of Sleep
###############################################################################

import os
import numpy as np

from visbrain.gui import Sleep
from visbrain.io import download_file, path_to_visbrain_data

# Download the file :
download_file('sleep_edf.zip', unzip=True, astype='example_data')
target_path = path_to_visbrain_data(folder='example_data')

# Get data path :
dfile = os.path.join(target_path, 'excerpt2.edf')            # data
hfile = os.path.join(target_path, 'Hypnogram_excerpt2.txt')  # hypnogram
cfile = os.path.join(target_path, 'excerpt2_config.txt')     # GUI config

# Define an instance of Sleep (the GUI is not shown yet) :
sp = Sleep(data=dfile, hypno=hfile, config_file=cfile)

###############################################################################
# Define new methods
###############################################################################

###############################################################################
コード例 #26
0
"""
Display fMRI activation
=======================

Display fMRI activations from a nii.gz file (NiBabel required).

See the original example :

https://pysurfer.github.io/auto_examples/plot_fmri_activation.html#sphx-glr-auto-examples-plot-fmri-activation-py

.. image:: ../../_static/examples/ex_eegmeg_fmri_activations.png
"""
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file

"""Download file if needed
"""
file = download_file('lh.sig.nii.gz', astype='example_data')

b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file, clim=(5., 20.), hide_under=5, cmap='viridis',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
コード例 #27
0
Download source's coordinates (xyz_sample.npz) :
https://www.dropbox.com/s/whogfxutyxoir1t/xyz_sample.npz?dl=1

.. image:: ../../picture/picbrain/ex_pictures.png
"""
import numpy as np

from visbrain import Brain
from visbrain.objects import Picture3DObj, SourceObj
from visbrain.io import download_file

kwargs = {}
# Load the xyz coordinates and corresponding subject name :

s_xyz = np.load(download_file('xyz_sample.npz'))['xyz']
# Keep every 10th source to thin the display.
s_xyz = s_xyz[4::10, ...]
n_sources = s_xyz.shape[0]
"""Define a source object
"""
s_obj = SourceObj('MySources', s_xyz, symbol='disc', color='green')
"""Define picture data
"""
sf = 1024.
n = 50
# Build a symmetric n x n time grid by mirroring two open-grid ramps.
x, y = np.ogrid[0:n / 2, 0:n / 2]
x, y = np.append(x, np.flip(x, 0)), np.append(y, np.flip(y, 1))
time = (x.reshape(-1, 1) + y.reshape(1, -1)) / sf
# Repeat the grid once per source.
time = np.tile(time[np.newaxis, ...], (n_sources, 1, 1))
# Source-dependent frequency scaling taken from the x coordinate.
coef = s_xyz[:, 0].reshape(-1, 1, 1) / 2.
data = np.sinc(coef * 2 * np.pi * 1. * time)
コード例 #28
0
================================================

This example illustrate how to define a custom brain template using your own
vertices and faces.

.. image:: ../../_static/examples/ex_add_brain_template.png
"""
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file
"""Download and the load the Custom.npz archive. This file contains vertices
and faces of a brain template that is not integrated by default in Visbrain.
"""
mat = np.load(download_file('Custom.npz', astype='example_data'))
"""Get vertices and faces from the archive.

In this examples, normals are also present in the archive. If you don't have
the normals, the BrainObj will compute it automatically.
"""
vert, faces, norms = mat['vertices'], mat['faces'], mat['normals']
"""Define the brain object
"""
b_obj = BrainObj('Custom', vertices=vert, faces=faces, normals=norms)
"""Then you have two strategies :
* If you are going to use this template a lot and don't want to redefine it
  every times, use `b_obj.save()`. Once the object saved, it can be reloaded
  using its name only `BrainObj('Custom')`
* If you only need it once, the template is temporaly saved and remove once the
  GUI is closed.
コード例 #29
0
ファイル: _tests_visbrain.py プロジェクト: skjerns/visbrain
 def need_file(self, file):
     """Path to a needed file from visbrain-data.

     Downloads the file if it is not already cached and returns its local
     path.
     """
     return download_file(file, astype='example_data')
コード例 #30
0
This script use a custom nifti volume downloadable at :
https://brainder.org/download/flair/

.. image:: ../../picture/picbrain/ex_add_nifti.png
"""
from __future__ import print_function
import numpy as np

from visbrain import Brain
from visbrain.objects import VolumeObj, CrossSecObj, SourceObj
from visbrain.io import download_file

"""Download two NIFTI files
"""
path_1 = download_file('GG-853-GM-0.7mm.nii.gz')
path_2 = download_file('GG-853-WM-0.7mm.nii.gz')

"""Define four sources sources and a Source object
"""
s_xyz = np.array([[29.9, -37.3, -19.3],
                  [-5.33, 14.00, 20.00],
                  [25.99, 14.00, 34.66],
                  [0., -1.99, 10.66]])
s_obj = SourceObj('MySources', s_xyz)

"""Define a volume object and a cross-section object
"""
vol_obj = VolumeObj(path_1)
cross_sec_obj = CrossSecObj(path_2)
コード例 #31
0
ファイル: load_elan.py プロジェクト: EtienneCmb/visbrain
"""
Load ELAN files
===============

This example demonstrate how to load an ELAN file.

Required dataset at :
https://www.dropbox.com/s/95xvdqivpgk90hg/sleep_elan.zip?dl=1

.. image:: ../../_static/examples/ex_LoadElan.png
"""
import os
from visbrain.gui import Sleep
from visbrain.io import download_file, path_to_visbrain_data

# ---------------------------------------------------------------------------
# Load the ELAN dataset
# ---------------------------------------------------------------------------
# Fetch the archive and unzip it into the example-data folder :
download_file("sleep_elan.zip", unzip=True, astype='example_data')
data_dir = path_to_visbrain_data(folder='example_data')

# Paths to the recording and its hypnogram :
data_file = os.path.join(data_dir, 'sub-02.eeg')
hypno_file = os.path.join(data_dir, 'sub-02.hyp')

# Launch the Sleep graphical interface :
Sleep(data=data_file, hypno=hypno_file).show()
コード例 #32
0
Control the cross-section panel and the volume using a Nifti file. The nibabel
package should also be installed.

See : https://brainder.org/download/flair/

.. image:: ../../picture/picbrain/ex_crossec_and_volume.png
"""
from visbrain import Brain
from visbrain.objects import CrossSecObj, VolumeObj
from visbrain.io import download_file
"""Import the volume and the associated affine transformation
"""
# Name of the nifti file to use (the alternative file is in the comment) :
volume_name = 'GG-853-WM-0.7mm.nii.gz'  # 'GG-853-GM-0.7mm.nii.gz'
"""Download the file.
"""
path = download_file(volume_name)
"""Define a cross-section object

Go to the Objects tab and select 'Cross-section' in the combo box. You can also
press x to display the cross-section panel.
"""
cs_obj = CrossSecObj(path, section=(70, 171, 80), cmap='gist_stern')
"""Define a volume object.

Go to the Objects tab and select 'Volume' in the combo box. You can also
press x to display the volume panel.
"""
# Both objects share the same downloaded volume file :
v_obj = VolumeObj(path)
"""Create the GUI and pass the cross-section and the volume object
"""
vb = Brain(cross_sec_obj=cs_obj, vol_obj=v_obj)
# NOTE(review): excerpt — the vb.show() call is presumably below this snippet.
コード例 #33
0
.. image:: ../../picture/picsleep/ex_LoadMatlab.png
"""
import os
import numpy as np
from scipy.io import loadmat

from visbrain import Sleep
from visbrain.io import download_file, path_to_visbrain_data

###############################################################################
#                               LOAD YOUR FILE
###############################################################################
# Folder that will receive the matlab dataset :
target_path = os.path.join(path_to_visbrain_data(), 'sleep_data', 'matlab')

# Download and unzip the matlab archive :
download_file("sleep_matlab.zip", unzip=True, to_path=target_path)

# Read the .mat file :
contents = loadmat(os.path.join(target_path, 's2_sleep.mat'))

# Extract the recording, its sampling frequency, channel names and hypnogram :
data = contents['data']
sf = float(contents['sf'])
channels = np.concatenate(contents['channels'].flatten()).tolist()
hypno = contents['hypno'].flatten()

# Launch the Sleep GUI :
Sleep(data=data, sf=sf, channels=channels, hypno=hypno).show()
コード例 #34
0
"""
Display fMRI activation
=======================

Display fMRI activations from a nii.gz file (NiBabel required).

See the original example :

https://pysurfer.github.io/auto_examples/plot_fmri_activation.html#sphx-glr-auto-examples-plot-fmri-activation-py

.. image:: ../../_static/examples/ex_eegmeg_fmri_activations.png
"""
from visbrain.gui import Brain
from visbrain.objects import BrainObj
from visbrain.io import download_file
"""Download file if needed
"""
file = download_file('lh.sig.nii.gz', astype='example_data')

b_obj = BrainObj('inflated', translucent=False, sulcus=True)
b_obj.add_activation(file=file,
                     clim=(5., 20.),
                     hide_under=5,
                     cmap='viridis',
                     hemisphere='left')

vb = Brain(brain_obj=b_obj)
vb.rotate('left')
vb.show()
コード例 #35
0
"""Test command lines."""
import os

from visbrain.gui import Figure
from visbrain.io import download_file, path_to_visbrain_data
from visbrain.tests._tests_visbrain import _TestVisbrain

# List of image files to test with :
_FILES = [
    'default.png', 'inside.png', 'count.png', 'density.png', 'repartition.jpg',
    'roi.jpg'
]
download_file('figure.zip', unzip=True, astype='example_data')

# Create a tmp/ directory :
dir_path = os.path.dirname(os.path.realpath(__file__))
path_to_tmp = os.path.join(*(dir_path, 'tmp'))


class TestFigure(_TestVisbrain):
    """Test figure.py."""

    ###########################################################################
    #                                 FIGURE
    ###########################################################################
    def test_figure(self):
        """Test function figure."""
        # Get files :
        files = [path_to_visbrain_data(k, 'example_data') for k in _FILES]

        # Titles :
コード例 #36
0
 def need_file(self, file):
     """Return the local path to *file*, fetched via download_file."""
     local_path = download_file(file)
     return local_path
コード例 #37
0
ファイル: brain_test.py プロジェクト: daoos/Mango
from __future__ import print_function
import numpy as np

from visbrain import Brain
from visbrain.objects import SourceObj, ConnectObj
from visbrain.io import download_file, path_to_visbrain_data

# Create an empty kwargs dictionary :
kwargs = {}

# ____________________________ DATA ____________________________

# Load the xyz coordinates and corresponding subject name :
'''
download_file('xyz_sample.npz')
mat = np.load(path_to_visbrain_data('xyz_sample.npz'))
xyz, subjects = mat['xyz'], mat['subjects']

print(xyz.shape)
print(subjects.shape)
'''

mat = np.load('weight.npz')
xyz, subjects = mat['arr_0'][0], mat['arr_1'][0]

# NOTE(review): the values loaded just above are immediately overwritten by
# the random data below — the np.load result is effectively unused.
xyz = np.random.rand(583, 3)
xyz = xyz * 70
subjects = np.random.rand(3, 583)
subjects = subjects * 70

#subjects = xyz * 2.7
コード例 #38
0
ファイル: plot_brain_obj.py プロジェクト: skjerns/visbrain
                  width=.1,
                  cbtxtsh=3.,
                  rect=(-.3, -2., 1., 4.))
KW = dict(title_size=14., zoom=1.2)

###############################################################################
# .. note::
#     The BrainObj can interact with sources (SourceObj). For example, if the
#     source object represents intracranial data (e.g iEEG) those sources can
#     be projected on the surface of the brain. This is an important feature
#     because intracranial implantation is usually subject dependent and the
#     projection is a good way to plot results across subjects. To illustrate
#     this feature, we provide a set of intracranial MNI coordinates.

# Download iEEG coordinates and define some random data
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
# One random value per recording site (placeholder activity) :
data = np.random.rand(xyz.shape[0])

###############################################################################
# Basic brain using MNI template
###############################################################################
# By default, Visbrain includes several MNI brain templates (B1, B2, B3,
# inflated, white and sphere).

# Translucent inflated BrainObj with both hemispheres displayed
b_obj_fs = BrainObj('inflated', translucent=True, hemisphere='both')
# Add the brain to the scene. Note that `row_span` means that the plot will
# occupy two rows (row 0 and 1)
sc.add_to_subplot(b_obj_fs,
                  row=0,
コード例 #39
0
This example illustrates how to combine multiple objects in the same
scene.

.. image:: ../../picture/picobjects/ex_combine_obj.png
"""
import numpy as np

from visbrain.objects import (BrainObj, SceneObj, SourceObj, RoiObj,
                              ConnectObj, CrossSecObj, TimeSeries3DObj,
                              Picture3DObj)
from visbrain.io import download_file
from visbrain.utils import generate_eeg

"""Get the path to Visbrain data and download deep sources
"""
# (x, y, z) coordinates of the deep sources and their subject labels :
mat = np.load(download_file('xyz_sample.npz'))
xyz, subjects = mat['xyz'], mat['subjects']
# One random value in [-1, 1) per source :
data = np.random.uniform(low=-1., high=1., size=(xyz.shape[0],))

print("""
=============================================================================
                             Default scene
=============================================================================
""")
# Shared camera / colorbar settings and the scene that hosts every subplot :
CAM_STATE = dict(azimuth=0,        # azimuth angle
                 elevation=90,     # elevation angle
                 )
CBAR_STATE = dict(cbtxtsz=12, txtsz=10., width=.1, cbtxtsh=3.,
                  rect=(-.3, -2., 1., 4.))
sc = SceneObj(camera_state=CAM_STATE, size=(1400, 1000))
コード例 #40
0
###############################################################################
# .. warning::
#     To be clear with the vocabulary used, the SourceObj has a different
#     meaning depending on the recording type. For scalp or intracranial EEG,
#     sources reflect electrodes, in MEG it could be sensors or source
#     reconstruction.

###############################################################################
# Download data
###############################################################################
# To illustrate the functionalities of the source object, here, we download an
# intracranial dataset consisting of 583 deep recording sites.

# Download the file and get the (x, y, z) MNI coordinates of the 583 recording
# sites
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz = mat['xyz']
n_sources = xyz.shape[0]
# One text label ('S0', 'S1', ...) per source :
text = ['S' + str(k) for k in range(n_sources)]

###############################################################################
# Scene creation
###############################################################################
# As said in other tutorials, the scene is equivalent to Matplotlib subplots.
# So here, we define a scene that is going to centralize objects in subplots

# Define the default camera state used for each subplot
CAM_STATE = dict(azimuth=0,        # azimuth angle
                 elevation=90,     # elevation angle
                 scale_factor=180  # distance to the camera
                 )
コード例 #41
0
import numpy as np
from itertools import product

from vispy.app.canvas import MouseEvent, KeyEvent
# from vispy.util.keys import Key

from visbrain import Brain
from visbrain.objects import (SourceObj, ConnectObj, TimeSeries3DObj,
                              Picture3DObj, RoiObj, VolumeObj, CrossSecObj)
from visbrain.io import download_file
from visbrain.tests._tests_visbrain import _TestVisbrain


# Download intracranial xyz :
mat = np.load(download_file('xyz_sample.npz'))
xyz_full = mat['xyz']
mat.close()
# Two disjoint subsets of 10 recording sites each :
xyz_1, xyz_2 = xyz_full[20:30, :], xyz_full[10:20, :]


# ---------------- Sources ----------------
# Define some random sources :
s_data = 100 * np.random.rand(10)
s_color = ['blue'] * 3 + ['white'] * 3 + ['red'] * 4
s_mask = np.array([True] + [False] * 9)

s_obj1 = SourceObj('S1', xyz_1, data=s_data, color=s_color, mask=s_mask)
s_obj2 = SourceObj('S2', xyz_2, data=2 * s_data, color=s_color,
                   mask=s_mask)
コード例 #42
0
ファイル: 03_sources.py プロジェクト: skjerns/visbrain
attach some data to sources and project this activity onto the surface
(cortical projection). Alternatively, you can run the cortical repartition
which is defined as the number of contributing sources per vertex.

.. image:: ../../_static/examples/ex_sources.png
"""
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import SourceObj, BrainObj
from visbrain.io import download_file

# kwargs is filled below (presumably passed to SourceObj further down — this
# snippet is an excerpt) :
kwargs = {}
"""Load the xyz coordinates and corresponding subject name
"""
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
"""The "subjects" list is composed of 6 diffrents subjects and here we set one
unique color (u_color) per subject.
"""
u_color = ["#9b59b6", "#3498db", "white", "#e74c3c", "#34495e", "#2ecc71"]
# k[1] (second character of each subject name) indexes into u_color — assumes
# names like 'S0'..'S5'; TODO confirm against the dataset.
kwargs['color'] = [u_color[int(k[1])] for k in subjects]
kwargs['alpha'] = 0.7
"""
Now we attach data to each source.
"""
kwargs['data'] = np.arange(len(subjects))
"""The source's radius is proportional to the data attached. But this
proportion can be controlled using a minimum and maximum radius
(s_radiusmin, s_radiusmax)
"""
コード例 #43
0
.. image:: ../../picture/picsleep/ex_LoadMNE.png
"""
import os
from mne import io
from visbrain import Sleep
from visbrain.io import download_file, path_to_visbrain_data

###############################################################################
#                               LOAD YOUR FILE
###############################################################################
# Folder that will receive the BrainVision dataset :
target_path = os.path.join(path_to_visbrain_data(), 'sleep_data',
                           'brainvision')

# Download and unzip the dataset :
download_file("sleep_brainvision.zip", unzip=True, to_path=target_path)

# BrainVision header file and hypnogram file :
vhdr_file = os.path.join(target_path, 'sub-02.vhdr')
hypno_file = os.path.join(target_path, 'sub-02.hyp')

# Read raw data using MNE-python :
raw = io.read_raw_brainvision(vhdr_fname=vhdr_file, preload=True)

# Extract the data, sampling frequency and channel names :
data = raw._data
sf = raw.info['sfreq']
chan = raw.info['ch_names']

# Now, pass all the arguments to the Sleep module :
Sleep(data=data, sf=sf, channels=chan, hypno=hypno_file).show()

# Alternatively, these steps can be done automatically by using the 'use_mne'
# input argument of sleep
コード例 #44
0
Illustration of the main functionalities and inputs of the cross-section object.

.. image:: ../../picture/picobjects/ex_cs_obj.png
"""
from visbrain.objects import CrossSecObj, SceneObj
from visbrain.io import download_file

# Scene hosting both cross-section objects side by side :
scene = SceneObj()

print("""
# =============================================================================
#                              Brodmann area
# =============================================================================
""")
# Cross-section built from the predefined Brodmann volume :
brod_cs = CrossSecObj('brodmann', interpolation='nearest',
                      section=(70, 80, 90))
brod_cs.localize_source((-10., -15., 20.))
scene.add_to_subplot(brod_cs, row=0, col=0, title='Brodmann area')

print("""
# =============================================================================
#                              Nii.gz file
# =============================================================================
""")
# Cross-section built from a downloaded nii.gz volume :
nii_path = download_file('GG-853-GM-0.7mm.nii.gz')
nii_cs = CrossSecObj(nii_path, section=(90, 80, 100), cmap='gist_stern')
scene.add_to_subplot(nii_cs, row=0, col=1, title='Nii.gz file')

scene.preview()
コード例 #45
0
See the original example :

https://pysurfer.github.io/auto_examples/plot_vector_meg_inverse_solution.html#sphx-glr-auto-examples-plot-vector-meg-inverse-solution-py

.. image:: ../../_static/examples/ex_eegmeg_vector_meg_inverse.png
"""
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import BrainObj, VectorObj
from visbrain.io import read_stc, download_file

"""Download file if needed
"""
stc_file = download_file('meg_source_estimate-lh.stc', astype='example_data')

# Read the *.stc file :
file = read_stc(stc_file)

# Get the data (column 2 — presumably a time point, TODO confirm) and the
# vertices from the file :
data = file['data'][:, 2]
vertices = file['vertices']

# Define a brain object and add the data to the mesh :
b_obj = BrainObj('white', translucent=True, hemisphere='left')
b_obj.add_activation(data=data, vertices=vertices, smoothing_steps=5,
                     clim=(7., 21.), hide_under=7., cmap='viridis')

# Build arrows :
dt = np.dtype([('vertices', float, 3), ('normals', float, 3)])
コード例 #46
0
import numpy as np

from visbrain.objects.tests._testing_objects import _TestVolumeObject
from visbrain.objects import SourceObj, RoiObj
from visbrain.io import (download_file, path_to_visbrain_data, read_nifti,
                         clean_tmp)

# ROI object restricted to two Brodmann areas :
roi_obj = RoiObj('brodmann')
roi_obj.select_roi([4, 6])
# 50 reproducible random sources (x and y recentred around 0) :
rnd = np.random.RandomState(0)
xyz = 100. * rnd.rand(50, 3)
xyz[:, 0] -= 50.
xyz[:, 1] -= 50.
s_obj = SourceObj('S1', xyz)

# Download the MIST parcellation (nifti volume + csv labels) :
download_file('MIST_ROI.zip', unzip=True)
nifti_file = path_to_visbrain_data('MIST_ROI.nii.gz')
csv_file = path_to_visbrain_data('MIST_ROI.csv')
# Read the .csv file :
arr = np.genfromtxt(csv_file, delimiter=';', dtype=str)
# Get column names, labels and index :
column_names = arr[0, :]
arr = np.delete(arr, 0, 0)  # drop the header row
n_roi = arr.shape[0]
roi_index = arr[:, 0].astype(int)
roi_labels = arr[:, [1, 2]].astype(object)
# Build the struct array :
label = np.zeros(n_roi, dtype=[('label', object), ('name', object)])
label['label'] = roi_labels[:, 0]
label['name'] = roi_labels[:, 1]
# Get the volume and the hdr transformation :
コード例 #47
0
    * Color connectivity links according to the number of connections per node
    * Color connectivity links using custom colors
"""
import numpy as np

from visbrain.objects import ConnectObj, SceneObj, SourceObj, BrainObj
from visbrain.io import download_file

###############################################################################
# Download data and define the scene
###############################################################################
# First, we download a connectivity dataset consisting of the location of each
# node (104) and the connectivity strength between every node (104, 104)

# Download data
arch = np.load(download_file('phase_sync_delta.npz', astype='example_data'))
nodes, edges = arch['nodes'], arch['edges']
# Create the scene
sc = SceneObj(size=(1500, 600))

###############################################################################
# Color by connectivity strength
###############################################################################
# First, we download a connectivity dataset consisting of the location of each
# node (iEEG site) and the connectivity strength between those nodes. The first
# coloring method illustrated below consists in coloring connections based on
# a colormap

# Coloring method
color_by = 'strength'
# Because we don't want to plot every connection, we only keep connections
コード例 #48
0
import numpy as np

from visbrain.objects import RoiObj, ColorbarObj, SceneObj, SourceObj, BrainObj
from visbrain.io import download_file, path_to_visbrain_data, read_nifti

###############################################################################
# Download data
###############################################################################
# In order to work, this example needs to download some data i.e coordinates
# of intracranial sources and a parcellation atlas (MIST) to illustrate how to
# define your own RoiObj

# Get the path to the ~/visbrain_data/example_data folder
vb_path = path_to_visbrain_data(folder='example_data')
# Download (x, y, z) coordinates of intracranial sources
mat = np.load(download_file('xyz_sample.npz', astype='example_data'))
xyz, subjects = mat['xyz'], mat['subjects']
# Random value in [-1, 1) attached to each source :
data = np.random.uniform(low=-1., high=1., size=(xyz.shape[0],))
# Download the MIST parcellation
download_file('MIST_ROI.zip', unzip=True, astype='example_data')

###############################################################################
# Scene creation
###############################################################################
# First, we need to create the scene that will host objects

# Scene creation with a custom size :
sc = SceneObj(size=(1400, 1000))
# In this example, we also illustrate the use of the colorbar object. Hence, we
# centralize colorbar properties inside a dictionary
CBAR_STATE = dict(cbtxtsz=12, txtsz=10., width=.1, cbtxtsh=3.,
コード例 #49
0
"""
###############################################################################
# Load your file and create an instance of Sleep
###############################################################################

import os
import numpy as np

from visbrain.gui import Sleep
from visbrain.io import download_file, path_to_visbrain_data

# Wonambi detectors (presumably wired into the Sleep instance further down —
# this snippet is an excerpt) :
from wonambi.detect.spindle import DetectSpindle, detect_Moelle2011
from wonambi.detect.slowwave import DetectSlowWave, detect_Massimini2004

# Download the file :
download_file('sleep_edf.zip', unzip=True, astype='example_data')
target_path = path_to_visbrain_data(folder='example_data')

# Get data path :
dfile = os.path.join(target_path, 'excerpt2.edf')  # data
hfile = os.path.join(target_path, 'Hypnogram_excerpt2.txt')  # hypnogram
cfile = os.path.join(target_path, 'excerpt2_config.txt')  # GUI config

# Define an instance of Sleep :
sp = Sleep(data=dfile, hypno=hfile, config_file=cfile)

###############################################################################
# Define new methods
###############################################################################

###############################################################################
コード例 #50
0
"""
Load EDF file
=============

This example demonstrate how to load an EDF file.

Required dataset at :
https://www.dropbox.com/s/bj1ra95rbksukro/sleep_edf.zip?dl=1

.. image:: ../../picture/picsleep/ex_LoadEDF.png
"""
import os
from visbrain import Sleep
from visbrain.io import download_file

current_path = os.getcwd()
target_path = os.path.join(current_path, 'data', 'edf')

download_file('sleep_edf.zip', unzip=True, to_path=target_path)

dfile = os.path.join(target_path, 'excerpt2.edf')
hfile = os.path.join(target_path, 'Hypnogram_excerpt2.txt')
cfile = os.path.join(target_path, 'excerpt2_config.txt')

Sleep(data=dfile, hypno=hfile, config_file=cfile).show()
コード例 #51
0
This script uses a custom nifti volume downloadable at :
https://brainder.org/download/flair/

.. image:: ../../_static/examples/ex_add_nifti.png
"""
# NOTE(review): excerpt of an example script — the Brain(...) call that uses
# these objects lies outside this snippet.
from __future__ import print_function
import numpy as np

from visbrain.gui import Brain
from visbrain.objects import VolumeObj, CrossSecObj, SourceObj
from visbrain.io import download_file

"""Download two NIFTI files
"""
# Grey- and white-matter nifti volumes from the example_data folder :
path_1 = download_file('GG-853-GM-0.7mm.nii.gz', astype='example_data')
path_2 = download_file('GG-853-WM-0.7mm.nii.gz', astype='example_data')

"""Define four sources sources and a Source object
"""
# (x, y, z) MNI coordinates of four sources :
s_xyz = np.array([[29.9, -37.3, -19.3],
                  [-5.33, 14.00, 20.00],
                  [25.99, 14.00, 34.66],
                  [0., -1.99, 10.66]])
s_obj = SourceObj('MySources', s_xyz)

"""Define a volume object and a cross-section object
"""
vol_obj = VolumeObj(path_1)
cross_sec_obj = CrossSecObj(path_2)
コード例 #52
0
"""Test command lines."""
import os

from visbrain import Figure
from visbrain.io import download_file, path_to_visbrain_data
from visbrain.tests._tests_visbrain import _TestVisbrain

# List of image files to test with :
_FILES = ['default.png', 'inside.png', 'count.png', 'density.png',
          'repartition.jpg', 'roi.jpg']
all_downloaded = [os.path.isfile(path_to_visbrain_data(k)) for k in _FILES]
if not all(all_downloaded):
    download_file('figure.zip', unzip=True)

# Create a tmp/ directory :
dir_path = os.path.dirname(os.path.realpath(__file__))
path_to_tmp = os.path.join(*(dir_path, 'tmp'))


class TestFigure(_TestVisbrain):
    """Test figure.py."""

    ###########################################################################
    #                                 FIGURE
    ###########################################################################
    def test_figure(self):
        """Test function figure."""
        # Get files :
        files = [path_to_visbrain_data(k) for k in _FILES]

        # Titles :
コード例 #53
0
ファイル: plot_topo_obj.py プロジェクト: EtienneCmb/visbrain
    levels with custom colors)
  * Display connectivity
"""
import numpy as np

from visbrain.objects import TopoObj, ColorbarObj, SceneObj
from visbrain.io import download_file

###############################################################################
# Download data
###############################################################################
# First, we download the data. A directory should be created to
# ~/visbrain_data/example_data. This example contains beta power for several
# channels defined by their xy coordinates.

path = download_file('topoplot_data.npz', astype='example_data')
mat = np.load(path)
xy, data = mat['xyz'], mat['data']
# One label per channel ('0', '1', ...) :
channels = [str(k) for k in range(len(data))]
# Plotting properties shared across topoplots and colorbar :
kw_top = dict(margin=15 / 100, chan_offset=(0., 0.1, 0.), chan_size=10)
kw_cbar = dict(cbtxtsz=12, txtsz=10., width=.3, txtcolor='black', cbtxtsh=1.8,
               rect=(0., -2., 1., 4.), border=False)

###############################################################################
# Creation of the scene
###############################################################################
# As said in other tutorials, the scene is equivalent to Matplotlib subplots.
# So here, we define a scene that is going to centralize objects in subplots

# Create a scene with a white background

sc = SceneObj(bgcolor='white', size=(1600, 900))