Example No. 1
    def test_exists_controls_editor_dialog_style(self):
        x = File(exists=True)
        editor = x.create_editor()
        self.assertEqual(editor.dialog_style, "open")

        x = File(exists=False)
        editor = x.create_editor()
        self.assertEqual(editor.dialog_style, "save")
Example No. 2
class Timestamps(VisModule):
    name = Str('Time Stamps')
    file = File(exists=True)
    timing_array = Str
    timing_channels = Str('0')
    time_set = List(TimeStampSet)
    selected_set = Instance(TimeStampSet)
    add_set = Button('Load timestamps')
    pop_set = Button('Remove timestamps')

    def _add_set_fired(self):
        used_colors = set([s.color for s in self.time_set])
        unused_colors = color_cycle - used_colors
        clr = list(unused_colors)[0]
        new_set = TimeStampSet(name='Set ' + str(len(self.time_set) + 1),
                               color=clr,
                               file=self.file,
                               timing_array=self.timing_array,
                               timing_channels=self.timing_channels,
                               parent=self.parent)
        self.time_set.append(new_set)

    def _pop_set_fired(self):
        set = self.selected_set
        set._unload()
        self.selected_set = None
        self.time_set.remove(set)

    def default_traits_view(self):
        from ..data_files import FileHandler
        v = View(
            HGroup(
                VGroup(
                    Group(Label('HDF5 file with timing signal'),
                          UItem('file')),
                    HGroup(
                        Group(
                            Label('Array with timing channel(s)'),
                            UItem('timing_array',
                                  editor=EnumEditor(name='handler.fields'))),
                        Group(
                            Label('Channel(s) with timing signal (comma-sep)'),
                            UItem('timing_channels')))),
                ## Item('is_binary', label='Binary series?'),
                VGroup(UItem('add_set', enabled_when='len(timing_source) > 0'),
                       UItem('pop_set',
                             enabled_when='len(timing_source) > 0')),
                Group(UItem('time_set', editor=tab_editor), show_border=True),
            ),
            handler=FileHandler)
        return v
Example No. 3
class GSODDataPlotterView(HasTraits):
    """ Application of the zoom tool to the GSOD plotting tool.
    Load a HDF file containing one or more timeseries and plot the entire data inside.
    The zoom tool allows to explore a subset of it. The legend allows to (de)select some
    timeseries.
    """
    data_file = File()
    ts_data = Dict()
    ts_plot = Instance(ToolbarPlot)

    traits_view = View(
            VGroup(Item('data_file', style = 'simple', label="HDF file to load"), 
                   Item('ts_plot', editor=ComponentEditor(size=(800, 600)), 
                        show_label=False),), 
            title='Chaco Plot with file loader and legend highlighter',
            width=900, height=800, resizable=True)

    def __init__(self, pandas_list = [], array_dict = {}, *args, **kw):
        """ If a (list of) pandas or a dict of arrays is passed, load them up. 
        """
        ts_data = {}
        super(GSODDataPlotterView, self).__init__(*args, **kw)
        if not isinstance(pandas_list, list):
            pandas_list = [pandas_list]
        if pandas_list:
            ts_data.update(pandas2array_dict(pandas_list))
        if array_dict:
            ts_data.update(array_dict)
        self.ts_data = ts_data # Now trigger the plot redraw

    def _data_file_changed(self):
       """ Update the data from the HDF5 file.
       """
       self.ts_data = pandas_hdf_to_data_dict(self.data_file)
       assert("index" in self.ts_data)

    def _ts_data_changed(self):
        """ Dataset has changed: update the plot.
        ENH: add the possibility to pass a dict to ArrayPlotData.
        """
        arr_data = ArrayPlotData()
        for k,v in self.ts_data.items():
            arr_data.set_data(k,v)
        self.ts_plot = ToolbarPlot(arr_data)
        for i, k in enumerate([k for k in self.ts_data.keys() if k != "index"]):
            self.ts_plot.plot(("index", k), name = k, color = colors[i % len(colors)])
        if self.data_file:
            self.ts_plot.title = "Time series visualization from %s" % self.data_file
        else:
            self.ts_plot.title = "Time series visualization"
        attach_tools(self.ts_plot)
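
A minimal, hypothetical way to launch this viewer (assuming the module-level helpers such as pandas_hdf_to_data_dict, attach_tools and colors are importable alongside the class):

if __name__ == '__main__':
    # Opens the TraitsUI window; picking an HDF5 file in the File editor
    # triggers _data_file_changed and redraws the Chaco plot.
    viewer = GSODDataPlotterView()
    viewer.configure_traits()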
Example No. 4
class SurfaceSource(HasTraits):
    """Expose points and tris of a file storing a surface.

    Parameters
    ----------
    file : File
        Path to a *-bem.fif file or a surface containing a Freesurfer surface.

    Attributes
    ----------
    pts : Array, shape = (n_pts, 3)
        Point coordinates.
    tris : Array, shape = (n_tri, 3)
        Triangles.

    Notes
    -----
    tri is always updated after pts, so in case downstream objects depend on
    both, they should sync to a change in tris.
    """

    file = File(exists=True, filter=['*.fif', '*.*'])
    points = Array(shape=(None, 3), value=np.empty((0, 3)))
    norms = Array
    tris = Array(shape=(None, 3), value=np.empty((0, 3)))

    @on_trait_change('file')
    def read_file(self):
        if os.path.exists(self.file):
            if self.file.endswith('.fif'):
                bem = read_bem_surfaces(self.file)[0]
                self.points = bem['rr']
                self.norms = bem['nn']
                self.tris = bem['tris']
            else:
                try:
                    points, tris = read_surface(self.file)
                    points /= 1e3
                    self.points = points
                    self.norms = []
                    self.tris = tris
                except Exception:
                    error(message="Error loading surface from %s (see "
                          "Terminal for details).",
                          title="Error Loading Surface")
                    self.reset_traits(['file'])
                    raise
        else:
            self.points = np.empty((0, 3))
            self.norms = np.empty((0, 3))
            self.tris = np.empty((0, 3))
Example No. 5
class bk_mat_import(time_data_import):
    """
    import of BK pulse matlab data
    """

    # name of the mat file to import
    from_file = File(filter=['*.mat'],
                     desc="name of the BK pulse mat file to import")

    traits_view = View(['from_file', '|[Import]'],
                       title='Time data',
                       buttons=OKCancelButtons)

    def get_data(self, td):
        """
        main work is done here: imports the data from pulse .mat file into
        time_data object td and saves also a '*.h5' file so this import
        need not be performed every time the data is needed
        """
        if not path.isfile(self.from_file):
            # no file there
            time_data_import.get_data(self, td)
            return
        #import data
        from scipy.io import loadmat
        m = loadmat(self.from_file)
        fh = m['File_Header']
        numchannels = int(fh.NumberOfChannels)
        l = int(fh.NumberOfSamplesPerChannel)
        sample_freq = float(fh.SampleFrequency.replace(', ', '.'))
        data = empty((l, numchannels), 'f')
        for i in range(numchannels):
            # map SignalName "Point xx" to channel xx-1
            ii = int(m["Channel_%i_Header" % (i + 1)].SignalName[-2:]) - 1
            data[:, ii] = m["Channel_%i_Data" % (i + 1)]
        name = td.name
        if name == "":
            name = path.join(td_dir, \
                path.splitext(path.basename(self.from_file))[0]+'.h5')
        else:
            if td.h5f is not None:
                td.h5f.close()
        # TODO problems with already open h5 files from other instances
        f5h = tables.open_file(name, mode='w')
        ac = f5h.create_earray(f5h.root, 'time_data', \
            tables.atom.Float32Atom(), (0, numchannels))
        ac.set_attr('sample_freq', sample_freq)
        ac.append(data)
        f5h.close()
        td.name = name
        td.load_data()
Example No. 6
class TrackLabelSource(HasTraits):
    """ Contains connection ids for streamlines
    """
    # identifying traits
    name = Str("")
    description = Str("")
    parameters = Dict()

    # file paths to data
    numpy_path = File("")
    graphml_path = File("")
    volume_path = File("")
    b0_volume_path = File("")
    qsdr_volume_path = File("")

    scalars = Array

    def load_array(self):
        self.scalars = np.load(os.path.join(self.base_dir,
                                            self.numpy_path)).astype(np.uint64)
        return self.scalars

    @on_trait_change("b0_volume_path")
    def update_params(self):
        if len(self.parameters) == 0:
            self.parameters = get_builtin_atlas_parameters(self.b0_volume_path)
        print(self.parameters)

    def to_json(self):
        return {
            "name": self.name,
            "description": self.description,
            "parameters": self.parameters,
            "numpy_path": self.numpy_path,
            "graphml_path": self.graphml_path,
            "b0_volume_path": self.b0_volume_path,
            "qsdr_volume_path": self.qsdr_volume_path
        }
Example No. 7
class ConnectedComponents(capsul.api.Process):
    """Extract connected components of a labelled volume"""

    input_image = File(
        Undefined, output=False, allowed_extensions=VOLUME_EXTENSIONS,
        desc="input label image")
    connectivity = Enum(
        "26", "4xy", "4xz", "4yz", "6", "8xy", "8xz", "8yz", "18",
        output=False, optional=True,
        desc="connectivity")

    output = File(
        Undefined, output=True, allowed_extensions=VOLUME_EXTENSIONS,
        desc="output labelled connected components volume")

    def get_commandline(self):
        return [
            "bv_env",  # needed to set DYLD_* in environment on Mac OS 10.11+
            "AimsConnectComp",
            "--input", self.input_image,
            "--output", self.output,
            "--connectivity", self.connectivity,
        ]
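
Assuming the process can be instantiated directly (Capsul versions differ; capsul.api.get_process_instance is the usual entry point), get_commandline simply interpolates the current trait values. The file names below are placeholders:

proc = ConnectedComponents()
proc.input_image = "labels.nii.gz"    # placeholder input path
proc.output = "components.nii.gz"     # placeholder output path
print(proc.get_commandline())
# ['bv_env', 'AimsConnectComp', '--input', 'labels.nii.gz',
#  '--output', 'components.nii.gz', '--connectivity', '26']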
Example No. 8
class MovieTheater(HasTraits):
    url = File(filename)

    state = PlayerState()
    duration = Float()
    position = Range(low=0.0, high='duration')
    error = Str()
    status = MediaStatus()
    buffer = Range(0, 100)
    muted = Bool(True)
    volume = Range(0.0, 100.0, value=100.0)
    playback_rate = Float(1.0)
    image_func = Callable()
    notify_interval = Float(0.5)
Example No. 9
    def test_trait(self):
        """ Method to test trait characterisitics: value, type.
        """
        self.assertTrue(is_trait_value_defined(5))
        self.assertFalse(is_trait_value_defined(""))
        self.assertFalse(is_trait_value_defined(None))
        self.assertFalse(is_trait_value_defined(Undefined))

        trait = CTrait(0)
        trait.handler = Float()
        self.assertFalse(is_trait_pathname(trait))
        for handler in [File(), Directory()]:
            trait.handler = handler
            self.assertTrue(is_trait_pathname(trait))
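
The helpers under test are not shown in this example; one plausible implementation that satisfies these assertions (inferred from the test itself, not taken from the library) could be:

from traits.api import Directory, File, Undefined

def is_trait_value_defined(value):
    # A value counts as defined when it is neither empty, None, nor Undefined.
    return value not in ("", None, Undefined)

def is_trait_pathname(trait):
    # A trait is a pathname trait when its handler is a File or Directory.
    return isinstance(trait.handler, (File, Directory))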
Example No. 10
class LabelEachVoxel(capsul.api.Process):
    """Assign a unique label to each voxel of a mask"""

    input_image = File(Undefined,
                       output=False,
                       allowed_extensions=VOLUME_EXTENSIONS,
                       desc="input mask")
    first_label = Int(1,
                      output=False,
                      optional=True,
                      desc="assign labels starting with this value")

    output_image = File(Undefined,
                        output=True,
                        allowed_extensions=VOLUME_EXTENSIONS,
                        desc="output label volume with S32 datatype")

    def get_commandline(self):
        return [
            "ylLabelEachVoxel", "--first-label",
            str(self.first_label), "--input", self.input_image, "--output",
            self.output_image
        ]
Example No. 11
class RelabelConjunction(capsul.api.Process):
    """Assign new labels to voxels that have the same pair of labels"""

    labels1 = File(Undefined,
                   output=False,
                   allowed_extensions=VOLUME_EXTENSIONS,
                   desc="input label image")
    labels2 = File(Undefined,
                   output=False,
                   allowed_extensions=VOLUME_EXTENSIONS,
                   desc="input label image")

    output = File(Undefined,
                  output=True,
                  allowed_extensions=VOLUME_EXTENSIONS,
                  desc="output label image")

    def get_commandline(self):
        # bv_env automatically launches the command through Python on Windows
        return [
            "bv_env", "ylRelabelConjunction", self.labels1, self.labels2,
            self.output
        ]
Example No. 12
class SurfaceSource(HasTraits):
    """Expose points and tris of a file storing a surface.

    Parameters
    ----------
    file : File
        Path to a *-bem.fif file or a surface containing a Freesurfer surface.

    Attributes
    ----------
    pts : Array, shape = (n_pts, 3)
        Point coordinates.
    tris : Array, shape = (n_tri, 3)
        Triangles.

    Notes
    -----
    tri is always updated after pts, so in case downstream objects depend on
    both, they should sync to a change in tris.
    """

    file = File(exists=True, filter=['*.fif', '*.*'])
    surf = Instance(Surf)

    @on_trait_change('file')
    def read_file(self):
        """Read the file."""
        if op.exists(self.file):
            if self.file.endswith('.fif'):
                bem = read_bem_surfaces(self.file, verbose=False)[0]
            else:
                try:
                    bem = read_surface(self.file, return_dict=True)[2]
                    bem['rr'] *= 1e-3
                    complete_surface_info(bem, copy=False)
                except Exception:
                    error(parent=None,
                          message="Error loading surface from %s (see "
                          "Terminal for details)." % self.file,
                          title="Error Loading Surface")
                    self.reset_traits(['file'])
                    raise
            self.surf = Surf(rr=bem['rr'], tris=bem['tris'], nn=bem['nn'])
        else:
            self.surf = self._surf_default()

    def _surf_default(self):
        return Surf(rr=np.empty((0, 3)),
                    tris=np.empty((0, 3), int),
                    nn=np.empty((0, 3)))
Example No. 13
class Laplacian(capsul.api.Process):
    """Solve the Laplacian model in the cortex"""

    classif = File(
        Undefined,
        output=False,
        allowed_extensions=VOLUME_EXTENSIONS,
        desc="classification image of the cortex (100 inside, 0 in CSF, "
        "200 in white matter)")
    precision = Float(
        0.001,
        output=False,
        optional=True,
        desc="target maximum relative error in first-order finite differences")
    typical_cortical_thickness = Float(
        3,
        output=False,
        optional=True,
        desc="typical thickness of the cortex (mm), used for accelerating "
        "convergence")
    verbosity = Int(1, output=False, optional=True, desc="Verbosity level")

    laplace_field = File(
        Undefined,
        output=True,
        allowed_extensions=VOLUME_EXTENSIONS,
        desc="output pseudo-temperature field (from 0 in CSF to 1 in the "
        "white matter)")

    def get_commandline(self):
        return [
            "ylLaplacian", "--classif", self.classif, "--output",
            self.laplace_field, "--precision",
            repr(self.precision), "--typical-cortical-thickness",
            repr(self.typical_cortical_thickness), "--verbose",
            str(self.verbosity)
        ]
Example No. 14
class bk_mat_import( time_data_import ):
    """
    Import of BK pulse matlab data.
    """

    #: Name of the mat file to import
    from_file = File(filter = ['*.mat'], 
        desc = "name of the BK pulse mat file to import")

    def get_data (self, td):
        """
        Main work is done here: imports the data from pulse .mat file into
        time_data object 'td' and saves also a `*.h5` file so this import
        need not be performed every time the data is needed.
        """
        if not path.isfile(self.from_file):
            # no file there
            time_data_import.get_data(self, td)
            return
        #import data
        from scipy.io import loadmat
        m = loadmat(self.from_file)
        fh = m['File_Header']
        numchannels = int(fh.NumberOfChannels)
        l = int(fh.NumberOfSamplesPerChannel)
        sample_freq = float(fh.SampleFrequency.replace(', ', '.'))
        data = empty((l, numchannels), 'f')
        for i in range(numchannels):
            # map SignalName "Point xx" to channel xx-1
            ii = int(m["Channel_%i_Header" % (i+1)].SignalName[-2:])-1
            data[:, ii] = m["Channel_%i_Data" % (i+1)]
        name = td.name
        if name == "":
            name = path.join(td_dir, \
                path.splitext(path.basename(self.from_file))[0]+'.h5')
        else:
            if td.h5f is not None:
                td.h5f.close()
        # TODO problems with already open h5 files from other instances
        file = _get_h5file_class()
        f5h = file(name, mode = 'w')
        f5h.create_extendable_array(
                'time_data', (0, numchannels), "float32")
        ac = f5h.get_data_by_reference('time_data')
        f5h.set_node_attribute(ac,'sample_freq',sample_freq)
        f5h.append_data(ac,data)
        f5h.close()
        td.name = name
        td.load_data()
Example No. 15
class MeshBuilderDataSourceModel(BaseDataSourceModel):
    """Class that constructs a domain mesh for an OpenFOAM simulation
    At the moment this is restricted to rectangular geometries for
    testing purposes
    NOTE: For the moment, only integers are supported for the dimensions
    of the domain. This is related to the observations that the solver
    as well as the snappyHexMesh-generator are failing when the extent
    of the blockMesh is heavily disharmonising with the number of cells
    into each direction. In order to work around this issue, the resolution
    shall be proportionally to the dimensions in each direction. Therefore,
    integers for the width, length and height of the blockMesh shall be
    required so that the amount of cells in x-, y- and z-direction
    shall also result into integers as well.
    """

    mesh_type = Enum(SUPPORTED_MESH_TYPES)

    units = Enum('cm', SUPPORTED_UNITS,
                 desc='Unit length for mesh dimensions')

    length = Int(5, desc='Length of rectangular domain mesh')

    width = Int(10, desc='Width of rectangular domain mesh')

    radius = Int(5, desc='Radius of cylinder domain mesh')

    height = Int(20, desc='Height of domain mesh')

    resolution = Int(5, desc="Number of mesh elements per unit length")

    path = File(desc='File path to stl-file')

    inside_location = List(
        Float, value=[0, 0, 0], maxlen=3, minlen=3,
        desc="Declare a point within the uploaded geometry"
        " which is inside the desired volume"
    )

    traits_view = View(
        Item('mesh_type'),
        Item('units'),
        Item('length', visible_when="mesh_type=='Rectangular'"),
        Item('width', visible_when="mesh_type=='Rectangular'"),
        Item('radius', visible_when="mesh_type=='Cylinder'"),
        Item('height', visible_when="mesh_type!='Complex'"),
        Item('resolution'),
        Item('path', visible_when="mesh_type=='Complex'"),
        Item('inside_location', visible_when="mesh_type=='Complex'")
    )
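
A small numeric illustration of the constraint discussed in the docstring (values taken from the defaults above; the arithmetic is only a sketch of the proportionality argument):

# With integer dimensions, the cell count per direction stays integral:
length, width, height = 5, 10, 20   # default dimensions
resolution = 5                      # mesh elements per unit length
cells = (length * resolution, width * resolution, height * resolution)
print(cells)   # (25, 50, 100)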
Example No. 16
class BaseViewer(HasTraits):
    reconstruction = Instance(Component)
    image = Array
    result = Array
    save_file = File(exists=False, auto_set=False, enter_set=True)
    save_button = Button('Save Result as .npy')

    def __init__(self, **kwargs):
        HasTraits.__init__(self, **kwargs)

    def _reconstruction_default(self):
        self.plot_data = ArrayPlotData(original=self.image,
                                       reconstruction=self.result)

        rows, cols = self.image.shape[:2]
        aspect = cols / float(rows)

        old = Plot(self.plot_data)
        old.img_plot('original', colormap=gray, origin='top left')
        old.title = 'Old'
        old.aspect_ratio = aspect

        self.new = Plot(self.plot_data)
        self.new.img_plot('reconstruction', colormap=gray, origin='top left')
        self.new.title = 'New'
        self.new.aspect_ratio = aspect

        container = HPlotContainer(bgcolor='none')
        container.add(old)
        container.add(self.new)

        return container

    def update_plot(self):
        self.plot_data.set_data('reconstruction', self.result)
        self.new.request_redraw()

    def _save_button_changed(self):
        try:
            np.save(self.save_file, self.result)
        except IOError as e:
            message('Could not save file: %s' % str(e))

        try:
            f = open(self.save_file + '.txt', 'w')
            f.write(str(self))
            f.close()
        except IOError as e:
            message('Could not save file: %s' % str(e))
Example No. 17
class MarkerPointSource(MarkerPoints):  # noqa: D401
    """MarkerPoints subclass for source files."""

    file = File(filter=mrk_wildcard, exists=True)
    name = Property(Str, depends_on='file')
    dir = Property(Str, depends_on='file')

    use = List(list(range(5)),
               desc="Which points to use for the interpolated "
               "marker.")
    enabled = Property(Bool, depends_on=['points', 'use'])
    clear = Button(desc="Clear the current marker data")
    edit = Button(desc="Edit the marker coordinates manually")

    view = mrk_view_basic

    @cached_property
    def _get_enabled(self):
        return np.any(self.points)

    @cached_property
    def _get_dir(self):
        if self.file:
            return os.path.dirname(self.file)

    @cached_property
    def _get_name(self):
        if self.file:
            return os.path.basename(self.file)

    @on_trait_change('file')
    def load(self, fname):
        if not fname:
            self.reset_traits(['points'])
            return

        try:
            pts = read_mrk(fname)
        except Exception as err:
            error(None, str(err), "Error Reading mrk")
            self.reset_traits(['points'])
        else:
            self.points = pts

    def _clear_fired(self):
        self.reset_traits(['file', 'points', 'use'])

    def _edit_fired(self):
        self.edit_traits(view=mrk_view_edit)
Example No. 18
class td_import( time_data_import ):
    """
    Import of `*.td` data as saved by earlier versions
    """

    #: Name of the comma delimited file to import.
    from_file = File(filter = ['*.td'], 
        desc = "name of the *.td file to import")

    traits_view = View(
        ['from_file', 
            '|[Import]'
        ], 
        title  = 'Time data', 
        buttons = OKCancelButtons
                    )

    def get_data (self, td):
        """
        Main work is done here: imports the data from `*.td` file into
        TimeSamples object `td` and saves also a `*.h5` file so this import
        need not be performed every time the data is needed.
        """
        if not path.isfile(self.from_file):
            # no file there
            time_data_import.get_data(self, td)
            return
        f = open(self.from_file, 'rb')
        h = pickle.load(f)
        f.close()
        sample_freq = h['sample_freq']
        data = h['data']
        numchannels = data.shape[1]
        name = td.name
        if name == "":
            name = path.join(td_dir, \
                        path.splitext(path.basename(self.from_file))[0]+'.h5')
        else:
            if td.h5f is not None:
                td.h5f.close()
        # TODO problems with already open h5 files from other instances
        f5h = tables.open_file(name, mode = 'w')
        ac = f5h.create_earray(f5h.root, 'time_data', \
            tables.atom.Float32Atom(), (0, numchannels))
        ac.set_attr('sample_freq', sample_freq)
        ac.append(data)
        f5h.close()
        td.name = name
        td.load_data()
Example No. 19
class GaussianSmoothing(capsul.api.Process):
    """3D Gaussian smoothing filter using the recursive Deriche method"""

    input_image = File(
        Undefined, output=False, allowed_extensions=VOLUME_EXTENSIONS,
        desc="input image")
    xsigma = Float(Undefined, output=False, optional=True,
                   desc="X standard deviation of the gaussian filter "
                   "[default=largest voxel size]")
    ysigma = Float(Undefined, output=False, optional=True,
                   desc="Y standard deviation of the gaussian filter "
                   "[default=largest voxel size]")
    zsigma = Float(Undefined, output=False, optional=True,
                   desc="Z standard deviation of the gaussian filter "
                   "[default=largest voxel size]")

    output_image = File(
        Undefined, output=True, allowed_extensions=VOLUME_EXTENSIONS,
        desc="Gaussian-filtered image"
    )

    def get_commandline(self):
        sigma_args = []
        if self.xsigma is not Undefined:
            sigma_args += ["--xsigma", str(self.xsigma)]
        if self.ysigma is not Undefined:
            sigma_args += ["--ysigma", str(self.ysigma)]
        if self.zsigma is not Undefined:
            sigma_args += ["--zsigma", str(self.zsigma)]
        return [
            "bv_env",  # needed to set DYLD_* in environment on Mac OS 10.11+
            "AimsGaussianSmoothing",
            "--input", self.input_image
        ] + sigma_args + [
            "--output", self.output_image
        ]
Example No. 20
    def __init__(self, study_config, configuration):
        super(FSLConfig, self).__init__(study_config, configuration)
        self.study_config.add_trait(
            'fsl_config',
            File(Undefined,
                 output=False,
                 desc='Parameter to specify the fsl.sh path'))
        self.study_config.add_trait(
            'fsl_prefix',
            String(Undefined, desc='Prefix to add to FSL commands'))
        self.study_config.add_trait(
            'use_fsl',
            Bool(Undefined,
                 output=False,
                 desc='Parameter to tell that we need to configure FSL'))
Example No. 21
class CharacterCount(HasTraits):

    filename = File()

    traits_view = View('filename', width=400)

    def _filename_changed(self):
        fname = self.filename
        try:
            with open(fname, 'rb') as f:
                n = len(f.read())
            print '"{:s}" has {:d} chars.'.format(fname, n)
        except IOError:
            # Ignore file errors.
            pass
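
A minimal way to run the demo (a sketch; any readable file chosen in the dialog works):

if __name__ == '__main__':
    # Opens a window with a file picker; choosing a file prints its size.
    CharacterCount().configure_traits()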
Example No. 22
class ConvertDataType(capsul.api.Process):
    """Convert the data type of an image"""

    input_image = File(Undefined,
                       output=False,
                       allowed_extensions=VOLUME_EXTENSIONS,
                       desc="input image")
    data_type = Enum("CDOUBLE",
                     "CFLOAT",
                     "DOUBLE",
                     "FLOAT",
                     "HSV",
                     "POINT3DF",
                     "RGB",
                     "RGBA",
                     "S16",
                     "S32",
                     "S8",
                     "U16",
                     "U32",
                     "U8",
                     "VECTOR_OF_3_SHORT",
                     "VECTOR_OF_6_FLOAT",
                     output=False,
                     desc="output data type")

    output_image = File(Undefined,
                        output=True,
                        allowed_extensions=VOLUME_EXTENSIONS,
                        desc="output label volume with S32 datatype")

    def get_commandline(self):
        return [
            "AimsFileConvert", "--type", self.data_type, "--input",
            self.input_image, "--output", self.output_image
        ]
Example No. 23
class EuclideanUpwindingAlongGradient(capsul.api.Process):
    """Compute distance to a boundary along the gradient of a scalar field."""

    domain = File(Undefined,
                  output=False,
                  allowed_extensions=VOLUME_EXTENSIONS,
                  desc="label image defining the computation domain")
    field = File(Undefined,
                 output=False,
                 allowed_extensions=VOLUME_EXTENSIONS,
                 desc="scalar field whose gradient is used as the integration "
                 "direction")
    downfield = Bool(
        False,
        optional=False,
        desc="work on inverted field (downfield instead of upfield)")
    domain_label = Int(100,
                       optional=True,
                       desc="label of the propagation domain")
    origin_label = Int(0, optional=True, desc="label of the origin object")
    verbosity = Int(1, output=False, optional=True, desc="Verbosity level")

    output = File(Undefined,
                  output=True,
                  allowed_extensions=VOLUME_EXTENSIONS,
                  desc="output volume containing the distance")

    def get_commandline(self):
        return [
            "ylUpwindDistance", "--domain", self.domain, "--field", self.field,
            "--invert",
            str(self.downfield), "--domain-label",
            str(self.domain_label), "--origin-label",
            str(self.origin_label), "--verbose",
            str(self.verbosity), "--output", self.output
        ]
Example No. 24
class MatlabProcess(Process):
    output_config = File(output=True,
                         desc='output file to write config',
                         allowed_extensions=['.json'])

    def requirements(self):
        return {'matlab': 'any'}

    def _run_process(self):
        import json
        import capsul.engine
        mconf = capsul.engine.configurations.get('capsul.engine.module.matlab')
        with open(self.output_config, 'w') as f:
            json.dump(mconf, f)
        if not mconf:
            raise RuntimeError('Matlab config is not present')
Example No. 25
class BinarizeCortex(capsul.api.Process):
    """Extract a binary image (0/1) of the cortex"""

    classif = File(
        Undefined, output=False, allowed_extensions=VOLUME_EXTENSIONS,
        desc="classification image of the cortex (100 inside, 0 in CSF, "
        "200 in white matter)")

    output_image = File(
        Undefined, output=True, allowed_extensions=VOLUME_EXTENSIONS,
        desc="binary image of the cortex (1 in the cortex, 0 elsewhere)"
    )

    def get_commandline(self):
        return [
            "bv_env",  # needed to set DYLD_* in environment on Mac OS 10.11+
            "AimsThreshold",
            "--verbose", "0",
            "-b",
            "--fg", "1",
            "-m", "eq",
            "-t", "100",
            "--input", self.classif,
            "--output", self.output_image]
Example No. 26
class MergeImagesAllToOne(capsul.api.Process):
    """Merge values into an image using a mask image."""

    input_image = File(Undefined,
                       output=False,
                       allowed_extensions=VOLUME_EXTENSIONS,
                       desc="input image")
    mask_image = File(Undefined,
                      output=False,
                      allowed_extensions=VOLUME_EXTENSIONS,
                      desc="mask image (must have an integer voxel type)")
    value = Float(Undefined, output=False, desc="replacement value")

    output_image = File(Undefined,
                        output=True,
                        allowed_extensions=VOLUME_EXTENSIONS,
                        desc="output image")

    def get_commandline(self):
        return [
            "AimsMerge", "--mode", "ao", "--value",
            repr(self.value), "--input", self.input_image, "--Mask",
            self.mask_image, "--output", self.output_image
        ]
Example No. 27
class MergeImagesSameValues(capsul.api.Process):
    """Merge values into an image using a mask image."""

    input_image = File(
        Undefined, output=False, allowed_extensions=VOLUME_EXTENSIONS,
        desc="input image")
    mask_image = File(
        Undefined, output=False, allowed_extensions=VOLUME_EXTENSIONS,
        desc="mask image (must have an integer voxel type)")

    output_image = File(
        Undefined, output=True, allowed_extensions=VOLUME_EXTENSIONS,
        desc="output image"
    )

    def get_commandline(self):
        return [
            "bv_env",  # needed to set DYLD_* in environment on Mac OS 10.11+
            "AimsMerge",
            "--mode", "sv",
            "--input", self.input_image,
            "--Mask", self.mask_image,
            "--output", self.output_image
        ]
Example No. 28
    def __init__(self, study_config, configuration):

        super(SomaWorkflowConfig, self).__init__(study_config, configuration)
        study_config.add_trait(
            'use_soma_workflow',
            Bool(False,
                 output=False,
                 desc='Use soma workflow for the execution',
                 groups=['soma-workflow']))
        study_config.add_trait(
            'somaworkflow_computing_resource',
            Str(Undefined,
                output=False,
                desc=
                'Soma-workflow computing resource to be used to run processing',
                groups=['soma-workflow']))
        study_config.add_trait(
            'somaworkflow_config_file',
            File(Undefined,
                 output=False,
                 optional=True,
                 desc='Soma-Workflow configuration file. '
                 'Default: $HOME/.soma_workflow.cfg',
                 groups=['soma-workflow']))
        study_config.add_trait(
            'somaworkflow_keep_failed_workflows',
            Bool(True,
                 desc='Keep failed workflows after pipeline execution through '
                 'StudyConfig',
                 groups=['soma-workflow']))
        study_config.add_trait(
            'somaworkflow_keep_succeeded_workflows',
            Bool(False,
                 desc='Keep succeeded workflows after pipeline execution '
                 'through StudyConfig',
                 groups=['soma-workflow']))
        study_config.add_trait(
            'somaworkflow_computing_resources_config',
            ControllerTrait(OpenKeyController(
                value_trait=ControllerTrait(ResourceController(),
                                            output=False,
                                            allow_none=False,
                                            desc='Computing resource config')),
                            output=False,
                            allow_none=False,
                            desc='Computing resource config',
                            groups=['soma-workflow']))
        self.study_config.modules_data.somaworkflow = {}
Example No. 29
class FileEditorDemo(HasTraits):
    """ Defines the main FileEditor demo class. """

    # Define a File trait to view:
    file_name = File()

    # Display specification (one Item per editor style):
    file_group = Group(Item('file_name', style='simple', label='Simple'),
                       Item('_'),
                       Item('file_name', style='custom', label='Custom'),
                       Item('_'), Item('file_name', style='text',
                                       label='Text'), Item('_'),
                       Item('file_name', style='readonly', label='ReadOnly'))

    # Demo view:
    view = View(file_group, title='FileEditor', buttons=['OK'], resizable=True)
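
The demo is typically launched through the standard Traits UI entry point; a short sketch:

if __name__ == '__main__':
    demo = FileEditorDemo()
    demo.configure_traits()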
Example No. 30
class FigureInspectorData(FigureInspector):
    """See :class:`Figure`. In adition.. defines a filename attribute.. ta load images from file
    """
    filename = File()

    traits_view = View('filename',
                       Group(Item('container',
                                  editor=ComponentEditor(size=size,
                                                         bgcolor=bg_color),
                                  show_label=False),
                             orientation="vertical"),
                       resizable=True)

    def _filename_changed(self, new):
        image = ImageData.fromfile(new)
        self.plot_image(image._data)
Example No. 31
class FiducialsSource(HasTraits):
    """Expose points of a given fiducials fif file.

    Parameters
    ----------
    file : File
        Path to a fif file with fiducials (*.fif).

    Attributes
    ----------
    points : Array, shape = (n_points, 3)
        Fiducials file points.
    """

    file = File(filter=[fid_wildcard])
    fname = Property(depends_on='file')
    points = Property(depends_on='file')

    @cached_property
    def _get_fname(self):
        fname = os.path.basename(self.file)
        return fname

    @cached_property
    def _get_points(self):
        if not os.path.exists(self.file):
            return None

        try:
            points = np.zeros((3, 3))
            fids, _ = read_fiducials(self.file)
            for fid in fids:
                ident = fid['ident']
                if ident == FIFF.FIFFV_POINT_LPA:
                    points[0] = fid['r']
                elif ident == FIFF.FIFFV_POINT_NASION:
                    points[1] = fid['r']
                elif ident == FIFF.FIFFV_POINT_RPA:
                    points[2] = fid['r']
            return points
        except Exception as err:
            error(
                None, "Error reading fiducials from %s: %s (See terminal "
                "for more information)" % (self.fname, str(err)),
                "Error Reading Fiducials")
            self.reset_traits(['file'])
            raise