# Example #1
# 0
class PlotConfig(t.HasTraits):
    """Plotting preferences: image saturation, default colormaps and
    keyboard bindings for navigating plot dimensions."""

    # Fraction of pixels treated as saturated when auto-scaling image contrast.
    saturated_pixels = t.CFloat(0.05,
                                label='Saturated pixels',
                                desc='Set the default saturated pixels value '
                                'for plotting images.'
                                )
    # Colormap choices are taken from the matplotlib registry (cmap_d);
    # the first entry of the list is the traits.Enum default.
    cmap_navigator = t.Enum(list(cmap_d.keys()),
                            label='Color map navigator',
                            desc='Set the default color map for the navigator.',
                            )
    cmap_signal = t.Enum(list(cmap_d.keys()),
                         label='Color map signal',
                         desc='Set the default color map for the signal plot.',
                         )
    # Keys used to step through the even (0, 2, 4) and odd (1, 3, 5)
    # navigation dimensions.
    dims_024_increase = t.Str('right',
                              label='Navigate right'
                              )
    dims_024_decrease = t.Str('left',
                              label='Navigate left',
                              )
    dims_135_increase = t.Str('down',
                              label='Navigate down',
                              )
    dims_135_decrease = t.Str('up',
                              label='Navigate up',
                              )
    modifier_dims_01 = t.Enum(['ctrl', 'alt', 'shift', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
                               'ctrl+alt+shift'], label='Modifier key for 1st and 2nd dimensions')  # 0 elem is default
    modifier_dims_23 = t.Enum(['shift', 'alt', 'ctrl', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
                               'ctrl+alt+shift'], label='Modifier key for 3rd and 4th dimensions')  # 0 elem is default
    modifier_dims_45 = t.Enum(['alt', 'ctrl', 'shift', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
                               'ctrl+alt+shift'], label='Modifier key for 5th and 6th dimensions')  # 0 elem is default
class config(BaseWorkflowConfig):
    """Workflow configuration for a surface-projection / ROI-extraction
    pipeline (directories, projection options and output format)."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Input selection and surface-projection parameters.
    datagrabber = traits.Instance(Data, ())
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py',
                               'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4',
                              'fsaverage3',
                              'fsaverage5',
                              'fsaverage6',
                              'fsaverage',
                              'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5],
                               traits.Float(),
                               mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    # Optional free-form script hook for advanced users.
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
# Example #3
# 0
class GeneralConfig(t.HasTraits):
    """General HyperSpy preferences: default file formats, interactivity,
    logging and display options."""

    # NOTE(review): the original desc had lines from the toolkit help text
    # spliced into the middle of the sentence; restored to the wording used
    # by the sibling GeneralConfig definitions. Typos fixed as well.
    default_file_format = t.Enum(
        'hdf5',
        'rpl',
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    default_export_format = t.Enum(
        *default_write_ext,
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    hspy_extension = t.CBool(
        False,
        desc='If enabled, HyperSpy will use the "hspy" extension when saving '
        'to HDF5 instead of the "hdf5" extension. "hspy" will be the default '
        'extension from HyperSpy v1.3')
    interactive = t.CBool(
        True,
        desc='If enabled, HyperSpy will prompt the user when options are '
        'available, otherwise it will use the default values if possible')
    logger_on = t.CBool(
        False,
        label='Automatic logging',
        desc='If enabled, HyperSpy will store a log in the current directory '
        'of all the commands typed')

    show_progressbar = t.CBool(
        True,
        label='Show progress bar',
        desc='If enabled, show a progress bar when available')

    dtb_expand_structures = t.CBool(
        True,
        label='Expand structures in DictionaryTreeBrowser',
        desc='If enabled, when printing DictionaryTreeBrowser (e.g. '
        'metadata), long lists and tuples will be expanded and any '
        'dictionaries in them will be printed similar to '
        'DictionaryTreeBrowser, but with double lines')
    logging_level = t.Enum([
        'CRITICAL',
        'ERROR',
        'WARNING',
        'INFO',
        'DEBUG',
    ],
                           desc='the log level of all hyperspy modules.')
    parallel = t.CBool(
        True, desc='Use parallel threads for computations by default.')

    lazy = t.CBool(False, desc='Load data lazily by default.')

    def _logger_on_changed(self, old, new):
        """Trait-change handler: toggle command logging when ``logger_on``
        changes."""
        if new is True:
            turn_logging_on()
        else:
            turn_logging_off()
# Example #4
# 0
class GeneralConfig(t.HasTraits):
    """General preferences: default formats, plot-on-load, interactivity
    and automatic logging."""

    # Typos in the user-visible desc strings fixed:
    # "reccomended because is" -> "recommended because it is",
    # "optios" -> "options", "will be plot" -> "will be plotted".
    default_file_format = t.Enum(
        'hdf5',
        'rpl',
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    default_export_format = t.Enum(
        *default_write_ext,
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    plot_on_load = t.CBool(
        False,
        desc='If enabled, the object will be plotted automatically on loading')
    interactive = t.CBool(
        True,
        desc='If enabled, Hyperspy will prompt the user when options are '
        'available, otherwise it will use the default values if possible')
    logger_on = t.CBool(
        False,
        label='Automatic logging',
        desc='If enabled, Hyperspy will store a log in the current directory '
        'of all the commands typed')

    def _logger_on_changed(self, old, new):
        """Trait-change handler: toggle command logging when ``logger_on``
        changes."""
        if new is True:
            turn_logging_on()
        else:
            turn_logging_off()
class config(HasTraits):
    """Workflow configuration for a FLIRT registration pipeline
    (create and/or apply linear transforms)."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    sink_dir = Directory(mandatory=True, desc="Location to store results")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    # Subjects
    interpolation = traits.Enum('trilinear','nearestneighbour','sinc',usedefault=True)
    name = traits.String('flirt_output',desc='name of folder to store flirt mats')
    # Separate grabbers for the transform-creation and transform-application
    # inputs.
    datagrabber_create = traits.Instance(Data, ())
    datagrabber_apply = traits.Instance(Data, ())
    create_transform = traits.Bool(True)
    apply_transform = traits.Bool(False)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
# Example #6
# 0
class PlotConfig(t.HasTraits):
    """Keyboard-navigation preferences for plotting."""

    # Arrow keys that step through the even (0, 2, 4) and odd (1, 3, 5)
    # navigation dimensions.
    dims_024_increase = t.Str('right', label='Navigate right')
    dims_024_decrease = t.Str('left', label='Navigate left')
    dims_135_increase = t.Str('down', label='Navigate down')
    dims_135_decrease = t.Str('up', label='Navigate up')

    # For each Enum below the first list element is the trait default.
    modifier_dims_01 = t.Enum(
        ['ctrl', 'alt', 'shift', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
         'ctrl+alt+shift'],
        label='Modifier key for 1st and 2nd dimensions')
    modifier_dims_23 = t.Enum(
        ['shift', 'alt', 'ctrl', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
         'ctrl+alt+shift'],
        label='Modifier key for 3rd and 4th dimensions')
    modifier_dims_45 = t.Enum(
        ['alt', 'ctrl', 'shift', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
         'ctrl+alt+shift'],
        label='Modifier key for 5th and 6th dimensions')
class AxisSelector(traits.HasTraits):
    """here we select what axes the user should use when plotting this data """
    masterList = traits.List
    masterListWithNone = traits.List
    xAxis = traits.Enum(values="masterList")
    yAxis = traits.Enum(values="masterList")
    series = traits.Enum(values="masterListWithNone")

    traits_view = traitsui.View(
        traitsui.VGroup(traitsui.Item("xAxis", label="x axis"),
                        traitsui.Item("yAxis", label="y axis"),
                        traitsui.Item("series", label="series"),
                        show_border=True, label="axes selection"))

    def __init__(self, **traitsDict):
        """allows user to select which axes are useful for plotting in this log"""
        super(AxisSelector, self).__init__(**traitsDict)

    def _masterList_default(self):
        """gets the header row of log file which are interpreted as the column
        names that can be plotted."""
        # NOTE(review): self.logFile is expected to be supplied by the caller
        # or a subclass; it is not declared as a trait here — confirm.
        logger.info("updating master list of axis choices")
        # (the original logged the same debug message twice)
        logger.debug("comment file = %s" % self.logFile)
        if not os.path.exists(self.logFile):
            return []
        try:
            with open(self.logFile) as csvfile:
                headerReader = csv.reader(csvfile)
                # next() builtin instead of the Python-2-only .next() method;
                # works on csv readers in both Python 2 and 3.
                headerRow = next(headerReader)
            return headerRow
        except IOError:
            return []

    def _masterListWithNone_default(self):
        """Default for masterListWithNone: "None" followed by the columns."""
        return ["None"] + self._masterList_default()
# Example #8
# 0
class config(HasTraits):
    """Workflow configuration for a temporal-filtering pipeline
    (directories, execution plugin and filter parameters)."""
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    save_script_only = traits.Bool(False)

    # Execution
    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. ')
    # DataGrabber
    # fixed: was the accidental double assignment
    # `datagrabber = datagrabber = traits.Instance(Data, ())`
    datagrabber = traits.Instance(Data, ())

    # filter
    highpass_freq = traits.Float()
    lowpass_freq = traits.Float()
    filtering_algorithm = traits.Enum("fsl","IIR","FIR","Fourier")
    tr = traits.Float()
class config(HasTraits):
    """Workflow configuration for a surface-based one-sample t-test
    (FreeSurfer group analysis)."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. ')
    # Per-node timeout in (presumably) seconds — TODO confirm units.
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    surface_template = traits.Enum("fsaverage","fsaverage5","fsaverage6","fsaverage4","subject")
    test_name = traits.String('FS_one_sample_t_test')
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
# Example #10
# 0
class config(HasTraits):
    """Minimal configuration for projecting files onto a FreeSurfer
    average surface."""

    files = traits.List(traits.File)  # input files to project
    pattern = traits.Str()            # used when use_pattern is enabled
    base_directory = Directory
    use_pattern = traits.Bool(False)
    # Hemisphere, surface type and target average surface.
    hemi = traits.Enum('lh', 'rh')
    surface = traits.Enum('white', 'inflated')
    target = traits.Enum('fsaverage5', 'fsaverage4', 'fsaverage')
class config(HasTraits):
    """Workflow configuration for surface projection and ROI extraction
    (directories, execution plugin, projection and smoothing options)."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. '
    )
    # Per-node timeout in (presumably) seconds — TODO confirm units.
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    # Surface-projection parameters (mri_vol2surf-style projection stem).
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py',
                               'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4',
                              'fsaverage3',
                              'fsaverage5',
                              'fsaverage6',
                              'fsaverage',
                              'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5],
                               traits.Float(),
                               mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    # Optional free-form script hook for advanced users.
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
# Example #12
# 0
class config(HasTraits):
    """Workflow configuration for a group-level (FLAME/randomise) analysis
    with optional cluster correction."""
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. '
    )
    # Per-node timeout in (presumably) seconds — TODO confirm units.
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    # FSL FLAME estimation mode for the group model.
    run_mode = traits.Enum("flame1", "ols", "flame12")
    save_script_only = traits.Bool(False)
    #Normalization
    brain_mask = traits.File(mandatory=True, desc='Brain Mask')
    name_of_project = traits.String("group_analysis", usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    #Correction:
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
# Example #13
# 0
class config(HasTraits):
    """Workflow configuration for preprocessing: directories, execution
    plugin, slice timing and motion-correction options."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    sink_dir = Directory(mandatory=False,
                         desc="Location to store BIPS results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "PBSGraph",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    # Subjects
    datagrabber = traits.Instance(Data, ())
    # Motion Correction

    do_slicetiming = Bool(True,
                          usedefault=True,
                          desc="Perform slice timing correction")
    SliceOrder = traits.List(traits.Int)
    TR = traits.Float(1.0, mandatory=True, desc="TR of functional")
    motion_correct_node = traits.Enum(
        'nipy',
        'fsl',
        'spm',
        'afni',
        desc="motion correction algorithm to use",
        usedefault=True,
    )
    use_metadata = traits.Bool(True)
    # fixed: was use_default=True — a typo that traits silently stores as
    # arbitrary metadata; the recognized keyword is usedefault.
    order = traits.Enum('motion_slicetime',
                        'slicetime_motion',
                        usedefault=True)
    loops = traits.List([5], traits.Int(5), usedefault=True)
    #between_loops = traits.Either("None",traits.List([5]),usedefault=True)
    speedup = traits.List([5], traits.Int(5), usedefault=True)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class MultiMeshMorpher(ta.HasTraits):
    """Mayavi-based viewer that switches between meshes sharing one
    triangulation and linearly blends ("morphs") between two of them."""

    # Name of the mesh currently shown; choices track _names.
    visible = ta.Enum(values='_names')
    # Name of the mesh blended toward when morph_alpha > 0.
    morph_target = ta.Enum(values='_names')
    # Blend weight: 0 shows `visible`, 1 shows `morph_target`.
    morph_alpha = ta.Range(0.0, 1.0, 0.0)
    show_edges = ta.Bool(False)
    _names = ta.List()

    def __init__(self, list_verts, tris, names=None, fig=None, **kw):
        """Register the meshes and build a single VTK actor for the first one.

        list_verts: sequence of vertex arrays, one per mesh, all sharing
            the triangulation `tris`.
        names: optional labels for the meshes (default "0", "1", ...).
        fig: optional mayavi figure; a new white-background one is created
            when None.
        """
        super(MultiMeshMorpher, self).__init__(**kw)
        self._list_verts = list_verts
        self._tris = tris

        if fig is None:
            self._fig = mlab.figure(bgcolor=(1, 1, 1))
        else:
            self._fig = fig

        if names is None:
            names = map(str, range(len(list_verts)))
        self._names = list(names)

        self._verts_by_name = dict(zip(self._names, list_verts))
        # Single actor/polydata pair; morphing only moves its points.
        self._actor, self._pd = mesh_as_vtk_actor(list_verts[0], tris, return_polydata=True)
        self._actor.property.set(
            ambient=0.0,
            specular=0.15, 
            specular_power=128., 
            diffuse=0.8,
        )
        self._fig.scene.add_actor(self._actor)

        # Show the first mesh; default morph target is the second, if any.
        self.visible = self._names[0]
        if len(self._names) > 1:
            self.morph_target = self._names[1]

    @ta.on_trait_change('visible, show_edges, morph_target, morph_alpha')
    def _update(self):
        """Re-render after any viewer trait changes, interpolating the
        point positions when morph_alpha > 0."""
        self._actor.property.edge_visibility = self.show_edges
        v1 = self._verts_by_name[self.visible]
        if self.morph_alpha > 0:
            v2 = self._verts_by_name[self.morph_target]
            self._pd.points = v1 * (1 - self.morph_alpha) + v2 * self.morph_alpha
        else:
            self._pd.points = v1
        self._fig.scene.render()

    # TraitsUI layout for the viewer controls.
    view = tu.View(
        tu.Group(
            tu.Item('visible'),
            tu.Item('morph_target'),
            tu.Item('morph_alpha'),
            tu.Item('show_edges', name='Wireframe'),
            label="Viewer"),
        title="MultiMeshMorpher"
    )
# Example #15
# 0
class PlotConfig(t.HasTraits):
    """Plotting preferences: colormaps, keyboard navigation bindings and
    ROI pick tolerance."""

    # Deprecated; kept only for backward compatibility of stored prefs.
    saturated_pixels = t.CFloat(
        0.,
        label='Saturated pixels (deprecated)',
        desc='Warning: this is deprecated and will be removed in HyperSpy v2.0'
    )
    # Don't use t.Enum to list all possible matplotlib colormap to
    # avoid importing matplotlib and building the list of colormap
    # when importing hyperpsy
    cmap_navigator = t.Str(
        'gray',
        label='Color map navigator',
        desc='Set the default color map for the navigator.',
    )
    cmap_signal = t.Str(
        'gray',
        label='Color map signal',
        desc='Set the default color map for the signal plot.',
    )
    # Arrow keys that step through the even (0, 2, 4) and odd (1, 3, 5)
    # navigation dimensions.
    dims_024_increase = t.Str('right', label='Navigate right')
    dims_024_decrease = t.Str(
        'left',
        label='Navigate left',
    )
    dims_135_increase = t.Str(
        'down',
        label='Navigate down',
    )
    dims_135_decrease = t.Str(
        'up',
        label='Navigate up',
    )
    modifier_dims_01 = t.Enum(
        [
            'ctrl', 'alt', 'shift', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
            'ctrl+alt+shift'
        ],
        label='Modifier key for 1st and 2nd dimensions')  # 0 elem is default
    modifier_dims_23 = t.Enum(
        [
            'shift', 'alt', 'ctrl', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
            'ctrl+alt+shift'
        ],
        label='Modifier key for 3rd and 4th dimensions')  # 0 elem is default
    modifier_dims_45 = t.Enum(
        [
            'alt', 'ctrl', 'shift', 'ctrl+alt', 'ctrl+shift', 'alt+shift',
            'ctrl+alt+shift'
        ],
        label='Modifier key for 5th and 6th dimensions')  # 0 elem is default
    pick_tolerance = t.CFloat(
        7.5,
        label='Pick tolerance',
        desc='The pick tolerance of ROIs in screen pixels.')
# Example #16
# 0
class MachineLearningConfig(t.HasTraits):
    """Preferences for exporting and plotting machine-learning (PCA/ICA)
    results."""

    export_factors_default_file_format = t.Enum(*default_write_ext)
    export_loadings_default_file_format = t.Enum(*default_write_ext)
    # fixed: the implicit string concatenations were missing separating
    # spaces ("one file""per factor", "two files""will contain"), which
    # produced run-together words in the user-visible help text.
    multiple_files = t.Bool(
        True,
        label='Export to multiple files',
        desc='If enabled, on exporting the PCA or ICA results one file '
             'per factor and loading will be created. Otherwise only two '
             'files will contain the factors and loadings')
    same_window = t.Bool(
        True,
        label='Plot components in the same window',
        desc='If enabled the principal and independent components will all'
             ' be plotted in the same window')
# Example #17
# 0
class GeneralConfig(t.HasTraits):
    """General preferences: default formats, matplotlib toolkit,
    interactivity, logging and display options."""

    # Typos in the user-visible desc strings fixed:
    # "reccomended because is" -> "recommended because it is",
    # "format it is useful" -> "format is useful".
    default_file_format = t.Enum(
        'hdf5',
        'rpl',
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    default_toolkit = t.Enum(
        "qt4",
        "gtk",
        "wx",
        "tk",
        "None",
        desc="Default toolkit for matplotlib and the user interface "
        "elements. "
        "When using gtk and tk the user interface elements are not"
        " available."
        "None is suitable to run headless. "
        "HyperSpy must be restarted for changes to take effect")
    default_export_format = t.Enum(
        *default_write_ext,
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    interactive = t.CBool(
        True,
        desc='If enabled, HyperSpy will prompt the user when options are '
        'available, otherwise it will use the default values if possible')
    logger_on = t.CBool(
        False,
        label='Automatic logging',
        desc='If enabled, HyperSpy will store a log in the current directory '
        'of all the commands typed')

    show_progressbar = t.CBool(
        True,
        label='Show progress bar',
        desc='If enabled, show a progress bar when available')

    dtb_expand_structures = t.CBool(
        True,
        label='Expand structures in DictionaryTreeBrowser',
        desc='If enabled, when printing DictionaryTreeBrowser (e.g. metadata), '
        'long lists and tuples will be expanded and any dictionaries in them will be '
        'printed similar to DictionaryTreeBrowser, but with double lines')

    def _logger_on_changed(self, old, new):
        """Trait-change handler: toggle command logging when ``logger_on``
        changes."""
        if new is True:
            turn_logging_on()
        else:
            turn_logging_off()
# Example #18
# 0
class PlotOptions(tr.HasTraits):
    """UI panel for picking x/y columns (each with a sign multiplier) and
    triggering a plot."""

    columns_headers_list = tr.List([])
    # Axis choices track the available column headers dynamically.
    x_axis = tr.Enum(values='columns_headers_list')
    y_axis = tr.Enum(values='columns_headers_list')
    # First Enum value is the default: +1 for x, -1 for y.
    x_axis_multiplier = tr.Enum(1, -1)
    y_axis_multiplier = tr.Enum(-1, 1)
    plot = tr.Button

    view = ui.View(
        ui.HGroup(ui.Item('x_axis'), ui.Item('x_axis_multiplier')),
        ui.HGroup(ui.Item('y_axis'), ui.Item('y_axis_multiplier')),
        ui.Item('plot', show_label=False),
    )
# Example #19
# 0
class config(BaseWorkflowConfig):
    """Preprocessing workflow configuration: directories, subjects,
    slice-timing/motion correction, artifact detection and smoothing."""
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(os.path.abspath('.'),mandatory=True, desc="Location where the BIP will store the results")
    field_dir = Directory(desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")

    # Subjects

    subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    func_template = traits.String('%s/functional.nii.gz')
    run_datagrabber_without_submitting = traits.Bool(desc="Run the datagrabber without \
    submitting to the cluster")
    timepoints_to_remove = traits.Int(0,usedefault=True)

    do_slicetiming = Bool(True, usedefault=True, desc="Perform slice timing correction")
    SliceOrder = traits.List(traits.Int)
    # fixed: was use_default=True — a typo that traits silently stores as
    # arbitrary metadata; the recognized keyword is usedefault.
    order = traits.Enum('motion_slicetime','slicetime_motion',usedefault=True)
    TR = traits.Float(mandatory=True, desc = "TR of functional")
    motion_correct_node = traits.Enum('nipy','fsl','spm','afni',
        desc="motion correction algorithm to use",
        usedefault=True,)

    csf_prob = traits.File(desc='CSF_prob_map') 
    grey_prob = traits.File(desc='grey_prob_map')
    white_prob = traits.File(desc='white_prob_map')
    # Artifact Detection

    norm_thresh = traits.Float(1, min=0, usedefault=True, desc="norm thresh for art")
    z_thresh = traits.Float(3, min=0, usedefault=True, desc="z thresh for art")

    # Smoothing
    fwhm = traits.Float(6.0,usedefault=True)
    save_script_only = traits.Bool(False)
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        """Button handler: report which expected functional files exist.

        fixed: the original used Python 2 print statements, a SyntaxError
        on Python 3; print() produces identical space-separated output.
        """
        subs = self.subjects

        for s in subs:
            if not os.path.exists(os.path.join(self.base_dir, self.func_template % s)):
                print("ERROR", os.path.join(self.base_dir, self.func_template % s), "does NOT exist!")
                break
            else:
                print(os.path.join(self.base_dir, self.func_template % s), "exists!")
# Example #20
# 0
class GeneralConfig(t.HasTraits):
    """General preferences: default formats, matplotlib toolkit,
    interactivity and automatic logging."""

    # NOTE(review): the original default_file_format desc had toolkit help
    # lines spliced mid-sentence, and default_toolkit's desc repeated
    # "user interface elements are not available." twice; both restored to
    # the wording of the sibling GeneralConfig definitions. Typos fixed
    # ("reccomended because is", "optios").
    default_file_format = t.Enum(
        'hdf5',
        'rpl',
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    default_toolkit = t.Enum(
        "qt4",
        "gtk",
        "wx",
        "tk",
        "None",
        desc="Default toolkit for matplotlib and the user interface "
        "elements. "
        "When using gtk and tk the user interface elements are not"
        " available. "
        "None is suitable to run headless. "
        "HyperSpy must be restarted for changes to take effect")
    default_export_format = t.Enum(
        *default_write_ext,
        desc='Using the hdf5 format is highly recommended because it is the '
        'only one fully supported. The Ripple (rpl) format is useful '
        'to export data to other software that do not support hdf5')
    interactive = t.CBool(
        True,
        desc='If enabled, HyperSpy will prompt the user when options are '
        'available, otherwise it will use the default values if possible')
    logger_on = t.CBool(
        False,
        label='Automatic logging',
        desc='If enabled, HyperSpy will store a log in the current directory '
        'of all the commands typed')

    show_progressbar = t.CBool(
        True,
        label='Show progress bar',
        desc='If enabled, show a progress bar when available')

    def _logger_on_changed(self, old, new):
        """Trait-change handler: toggle command logging when ``logger_on``
        changes."""
        if new is True:
            turn_logging_on()
        else:
            turn_logging_off()
class config(HasTraits):
    """Traits-based configuration for a BIPS dicom-conversion workflow:
    directory layout, execution plugin, subject list and conversion flags."""

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. ')
    timeout = traits.Float(14.0)  # NOTE(review): units not stated here — TODO confirm
    #Subjects
    subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
        desc="Subject id's. Bips expects dicoms to be organized by subject id's")
    # presumably %s is filled with the subject id — verify against the converter
    dicom_dir_template = traits.String('%s/dicoms/')

    #Conversion Options
    embed_meta = traits.Bool(True)  # embed metadata during conversion
    info_only = traits.Bool(True)   # if True, only gather info (no conversion output) — TODO confirm
    no_moco = traits.Bool(False,desc="only convert non-moco files")
    use_heuristic = traits.Bool(False)  # when True, heuristic_file drives conversion
    heuristic_file = traits.File(desc="heuristic file")
# Example #22 (scraped separator — commented out so the file stays runnable)
# 0
class config(HasTraits):
    """Traits-based configuration for a BIPS workflow: directories,
    execution plugin, subject list, preprocessing config and an optional
    advanced script hook."""

    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. ')
    # Subjects

    subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    # Preprocessing info
    preproc_config = traits.File(desc="preproc json file")

    #Advanced
    use_advanced_options = traits.Bool()  # when True, advanced_script is executed — TODO confirm semantics
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)  # generate the script without running it — TODO confirm
# Example #23 (scraped separator — commented out so the file stays runnable)
# 0
class config(HasTraits):
    """Traits-based configuration: directories, execution plugin,
    a flexible datagrabber input and advanced scripting hooks."""

    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')

    # Directory layout
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    save_script_only = traits.Bool(False)
    sink_dir = Directory(mandatory=True, desc="Location to store results")

    # Execution settings
    run_using_plugin = Bool(False, usedefault=True,
                            desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"}, usedefault=True,
                              desc='Plugin arguments.')

    # Input selection
    datagrabber = traits.Instance(Data, ())
    name = traits.String('mean')

    # Advanced scripting hooks
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class config(BaseWorkflowConfig):
    """Traits-based configuration for an FSL group-analysis workflow:
    directories, model/run mode, randomise and cluster-correction options."""

    uuid = traits.Str(desc="UUID")

    # Directory layout
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(
        mandatory=True,
        desc="Location where the BIP will store the results")

    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1", "ols", "flame12")
    save_script_only = traits.Bool(False)

    # Normalization inputs
    brain_mask = traits.File(mandatory=True, desc='Brain Mask')
    name_of_project = traits.String("group_analysis", usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    # Correction parameters
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced scripting hooks
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # GUI buttons
    check_func_datagrabber = Button("Check")
# Example #25 (scraped separator — commented out so the file stays runnable)
# 0
class Options(t.HasTraits):
    """Container holding a dynamic set of option values.

    The trait is declared with a single placeholder value; the constructor
    replaces it with the caller-supplied (or default) options.
    """

    # NOTE: ('a') is not a tuple — this is equivalent to t.Enum('a').
    options = t.Enum(('a'))

    def __init__(self, options=None):
        # BUG FIX: HasTraits subclasses must chain to the parent initializer,
        # otherwise the trait notification machinery is not set up.
        super(Options, self).__init__()
        if not options:
            options = ['a', 'b', 'c']
        # NOTE(review): assigning a list to an Enum trait looks suspicious —
        # it may raise a TraitError; confirm the intended trait type.
        self.options = options
class config(HasTraits):
    """Traits-based configuration for a BIPS first-level workflow:
    directories (including the Freesurfer subjects dir), execution plugin,
    subjects/split-file inputs and advanced options."""

    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")

    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether where and if the workflow keeps its \
                            intermediary files. True to keep intermediary files. ')
    timeout = traits.Float(14.0)  # NOTE(review): units not stated here — TODO confirm
    subjects = traits.List(desc="subjects")
    split_files = traits.List(traits.File(),desc="""list of split files""")
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)  # when True, advanced_options code is used — TODO confirm
    advanced_options = traits.Code()
# Example #27 (scraped separator — commented out so the file stays runnable)
# 0
def getNode(_type,tr,config):
    """Build a colander SchemaNode (with a deform widget) for one trait.

    Maps a traits trait-type onto the matching colander/deform form node so
    a HasTraits config object can be rendered as an HTML form.

    Parameters:
        _type: type object of a trait instance, e.g. ``type(traits.Int())``.
        tr: name (string) of the trait on ``config``.
        config: HasTraits instance supplying trait metadata (desc, enum
                values).

    Returns a ``colander.SchemaNode`` — or, for Data instances, the
    datagrabber HTML view.
    """
    from bips.workflows.flexible_datagrabber import Data, DataBase
    # Dispatch on the exact type of a freshly constructed trait instance.
    if _type == type(traits.Int()):
            col_type = colander.SchemaNode(colander.Int(),
                                           name=tr,description=config.trait(tr).desc)
    elif _type == type(traits.Float()):
        col_type = colander.SchemaNode(colander.Decimal(),name=tr)    
        
    elif _type == type(traits.String()) or _type==type(traits.Str()):
        col_type = colander.SchemaNode(colander.String(),name=tr)
        
    elif _type == type(traits.Enum('')):
        # Present the enum choices as (value, label) pairs in a select widget.
        values=config.trait(tr).trait_type.values
        the_values = []
        for v in values:
            the_values.append((v,v))
        col_type = colander.SchemaNode(
            deform.Set(),
            widget=deform.widget.SelectWidget(values=the_values),
            name=tr)
    elif _type == type(traits.Bool()):
        col_type = colander.SchemaNode(colander.Boolean(),widget=deform.widget.CheckboxWidget(),name=tr)
    elif _type == type(traits.Code()):
        col_type = colander.SchemaNode(colander.String(),name=tr,widget=deform.widget.TextAreaWidget(cols=100,rows=20))
    elif _type == type(traits.Instance(Data,())):
        from bips.workflows.flexible_datagrabber import create_datagrabber_html_view
        col_type = create_datagrabber_html_view() 
    elif _type == type(traits.List()):
        # get_list is defined elsewhere in this module.
        col_type =get_list(_type,tr,config) 
    else:
        # Unknown trait type: warn (Python 2 print) and fall back to a
        # plain string field.
        print "type: ", _type, "not found!"
        col_type = colander.SchemaNode(colander.String(),name=tr)
    return col_type
class LineValueVi3D(Viz3D):
    """Mayavi 3D visualization of per-point vector values (bending moments)
    rendered as scaled, scalar-colored glyphs."""

    # Glyph shape for the quiver arrows; only 'cone_source' is offered.
    glyph_source = t.Enum('cone_source')

    def plot(self):
        """Build the quiver3d pipeline and register it in ``self.pipes``."""
        m = self.ftv.mlab
        x, y, z, u, v, w, scalars, scale_factor = self.get_values()
        vectors = m.quiver3d(x, y, z, u, v, w, name='Bending moments')
        # Replace the default glyph with the configured source from the
        # glyph dictionary (e.g. a cone).
        vectors.glyph.glyph_source.glyph_source = \
            vectors.glyph.glyph_source.glyph_dict[self.glyph_source]

        vectors.glyph.glyph.scale_factor = scale_factor
        vectors.glyph.glyph.clamping = False
        vectors.glyph.color_mode = 'color_by_scalar'
        # Center each glyph on its data point (no offset).
        vectors.glyph.glyph_source.glyph_source.center = np.array([0., 0., 0.])
        ds = vectors.mlab_source.dataset
        # Attach scalar (magnitude) and vector (direction) point data so
        # glyphs can be colored by moment value.
        ds.point_data.scalars = scalars
        ds.point_data.scalars.name = 'bending moments'
        ds.point_data.vectors = np.c_[u, v, w]
        ds.point_data.vectors.name = 'directions'

        self.pipes['vectors'] = vectors

    def update(self, vot=0.0):
        """Refresh the existing pipeline with current values.

        vot: unused here — presumably part of the Viz3D update interface
        (TODO confirm).
        """
        x, y, z, u, v, w, scalars, scale_factor = self.get_values()
        vectors = self.pipes['vectors']
        vectors.mlab_source.set(x=x, y=y, z=z, u=u, v=v, w=w)
        vectors.mlab_source.set(scalars=scalars)
        vectors.mlab_source.set(vectors=np.c_[u, v, w])
        vectors.glyph.glyph.scale_factor = scale_factor

        lut = vectors.module_manager.scalar_lut_manager
        lut.set(show_scalar_bar=True, show_legend=True, data_name='moment')
# Example #29 (scraped separator — commented out so the file stays runnable)
# 0
class ButterworthFilter(Smoothing):
    """Interactive Butterworth smoothing filter with a traitsui dialog.

    The filtered preview is recomputed whenever any parameter changes.
    """

    cutoff_frequency_ratio = t.Range(0., 1., 0.05)
    type = t.Enum('low', 'high')
    order = t.Int(2)

    view = tu.View(
        tu.Group('cutoff_frequency_ratio', 'order', 'type'),
        kind='live',
        handler=SmoothingHandler,
        buttons=OKCancelButtons,
        title='Butterworth filter',
    )

    # Each parameter-change handler re-draws the preview line.
    def _cutoff_frequency_ratio_changed(self, old, new):
        self.update_lines()

    def _type_changed(self, old, new):
        self.update_lines()

    def _order_changed(self, old, new):
        self.update_lines()

    def model2plot(self, axes_manager=None):
        """Return the zero-phase Butterworth-filtered signal data."""
        numerator, denominator = sp.signal.butter(
            self.order, self.cutoff_frequency_ratio, self.type)
        return sp.signal.filtfilt(numerator, denominator, self.signal())
# Example #30 (scraped separator — commented out so the file stays runnable)
# 0
class ButterworthFilter(Smoothing):
    """Butterworth smoothing filter (variant without a GUI view).

    ``model2plot`` previews the filtered data; ``apply`` maps the filter
    over the whole signal.
    """

    # Cutoff in [0.01, 1.], default 0.01 — presumably a fraction of the
    # Nyquist frequency as expected by scipy.signal.butter; TODO confirm.
    cutoff_frequency_ratio = t.Range(0.01, 1., 0.01)
    type = t.Enum('low', 'high')
    order = t.Int(2)

    # Each parameter-change handler re-draws the preview line.
    def _cutoff_frequency_ratio_changed(self, old, new):
        self.update_lines()

    def _type_changed(self, old, new):
        self.update_lines()

    def _order_changed(self, old, new):
        self.update_lines()

    def model2plot(self, axes_manager=None):
        """Return the zero-phase (filtfilt) Butterworth-filtered data."""
        b, a = sp.signal.butter(self.order, self.cutoff_frequency_ratio,
                                self.type)
        smoothed = sp.signal.filtfilt(b, a, self.signal())
        return smoothed

    def apply(self):
        """Apply the filter to the signal via ``signal.map``.

        Uses the same coefficients as ``model2plot``.
        """
        b, a = sp.signal.butter(self.order, self.cutoff_frequency_ratio,
                                self.type)
        f = functools.partial(sp.signal.filtfilt, b, a)
        self.signal.map(f)