Example 1
class EntryBlock(traits.HasTraits):

    fieldName = traits.String(
        "fieldName",
        desc=
        "describes what the information to be entered in the text block is referring to"
    )
    textBlock = traits.String()

    traits_view = traitsui.View(
        traitsui.VGroup(traitsui.Item("fieldName",
                                      show_label=False,
                                      style="readonly"),
                        traitsui.Item("textBlock",
                                      show_label=False,
                                      style="custom"),
                        show_border=True,
                        label="information"))

    def __init__(self, **traitsDict):
        """user supplies arguments in init to supply class attributes defined above """
        super(EntryBlock, self).__init__(**traitsDict)

    def clearTextBlock(self):
        self.textBlock = ""
Example 2
class EntryBlock(traits.HasTraits):
    
    fieldName = traits.String("fieldName",desc = "describes what the information to be entered in the text block is referring to")
    textBlock = traits.String()
    commitButton = traits.Button("save",desc="commit information in text block to logFile")
    # commentFile is used by _commitButton_fired below but was not declared in this
    # snippet; the trait is added here so the class is self-contained (the actual path
    # is expected to be supplied by the caller).
    commentFile = traits.File(desc="log file that commitButton appends to")

    
    traits_view = traitsui.View(traitsui.VGroup(
                    traitsui.Item("fieldName",show_label=False, style="readonly"),
                    traitsui.Item("textBlock",show_label=False, style="custom"),
                    traitsui.Item("commitButton",show_label=False), show_border=True, label="information"
                        ))
    
    def __init__(self, **traitsDict):
        """user supplies arguments in init to supply class attributes defined above """
        super(EntryBlock,self).__init__(**traitsDict)
        
    def _commitButton_fired(self):
        logger.info("saving %s info starting" % self.fieldName)
        timeStamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        blockDelimiterStart = "__"+self.fieldName+"__<start>"
        blockDelimiterEnd = "__"+self.fieldName+"__<end>"
        fullString = "\n"+blockDelimiterStart+"\n"+timeStamp+"\n"+self.textBlock+"\n"+blockDelimiterEnd+"\n"
        with open(self.commentFile, "a+") as writeFile:
            writeFile.write(fullString)
        logger.info("saving %s info finished" % self.fieldName)
    
    def clearTextBlock(self):
        self.textBlock = ""
Example 3
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                          desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    fwhm=traits.List(traits.Float())
    inputs_template = traits.String('%s/preproc/output/fwhm_%s/*.nii.gz')
    meanfunc_template = traits.String('%s/preproc/mean/*_mean.nii.gz')
    fsl_mat_template = traits.String('%s/preproc/bbreg/*.mat')
    unwarped_brain_template = traits.String('%s/smri/unwarped_brain/*.nii*')
    affine_transformation_template = traits.String('%s/smri/affine_transformation/*.nii*')
    warp_field_template = traits.String('%s/smri/warped_field/*.nii*')

    
    #Normalization
    standard_transform_template = traits.File(mandatory=True,desc='Standard template to warp to')
    standard_norm_template = traits.File()
    standard_warp_field_template = traits.File()
    standard_affine_transformation_template = traits.File()
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example 4
def getNode(_type,tr,config):
    from bips.workflows.flexible_datagrabber import Data, DataBase
    if _type == type(traits.Int()):
        col_type = colander.SchemaNode(colander.Int(),
                                       name=tr, description=config.trait(tr).desc)
    elif _type == type(traits.Float()):
        col_type = colander.SchemaNode(colander.Decimal(),name=tr)    
        
    elif _type == type(traits.String()) or _type==type(traits.Str()):
        col_type = colander.SchemaNode(colander.String(),name=tr)
        
    elif _type == type(traits.Enum('')):
        values=config.trait(tr).trait_type.values
        the_values = []
        for v in values:
            the_values.append((v,v))
        col_type = colander.SchemaNode(
            deform.Set(),
            widget=deform.widget.SelectWidget(values=the_values),
            name=tr)
    elif _type == type(traits.Bool()):
        col_type = colander.SchemaNode(colander.Boolean(),widget=deform.widget.CheckboxWidget(),name=tr)
    elif _type == type(traits.Code()):
        col_type = colander.SchemaNode(colander.String(),name=tr,widget=deform.widget.TextAreaWidget(cols=100,rows=20))
    elif _type == type(traits.Instance(Data,())):
        from bips.workflows.flexible_datagrabber import create_datagrabber_html_view
        col_type = create_datagrabber_html_view() 
    elif _type == type(traits.List()):
        col_type = get_list(_type,tr,config)
    else:
        print("type: ", _type, "not found!")
        col_type = colander.SchemaNode(colander.String(),name=tr)
    return col_type
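
For context, a hedged sketch of how getNode might be driven; the build_schema helper below is illustrative (not from the source) and simply walks a config instance's editable traits, handing each trait type to getNode to assemble a colander mapping schema that deform can render.

# Illustrative helper, assuming the config instance comes from one of the classes below.
import colander

def build_schema(cfg):
    schema = colander.SchemaNode(colander.Mapping(), name="config")
    for tr in cfg.editable_traits():                     # HasTraits introspection
        trait_type = type(cfg.trait(tr).trait_type)      # matches the type(...) checks above
        schema.add(getNode(trait_type, tr, cfg))
    return schema

# form = deform.Form(build_schema(config()), buttons=("submit",))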
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")

    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1","ols","flame12")
    save_script_only = traits.Bool(False)
    #Normalization
    brain_mask = traits.File(mandatory=True,desc='Brain Mask')
    name_of_project = traits.String("group_analysis",usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    #Correction:
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example 6
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    sink_dir = Directory(mandatory=True, desc="Location to store results")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    # Subjects
    interpolation = traits.Enum('trilinear','nearestneighbour','sinc',usedefault=True)
    name = traits.String('flirt_output',desc='name of folder to store flirt mats')
    datagrabber_create = traits.Instance(Data, ())
    datagrabber_apply = traits.Instance(Data, ())
    create_transform = traits.Bool(True)
    apply_transform = traits.Bool(False)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py',
                               'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4',
                              'fsaverage3',
                              'fsaverage5',
                              'fsaverage6',
                              'fsaverage',
                              'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5],
                               traits.Float(),
                               mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example 8
class Element(traits.HasTraits):
    """parent class for a defined element. Element can be chosen as
    a physics property in the physics tab and allow fits to calculate
    properties of atomic clouds"""
    nameID = traits.String(
        desc="name of element for dictionary key (no superscripts etc)")
    massATU = traits.Float(22.9897692807,
                           label="mass (u)",
                           desc="mass in atomic mass units")
    decayRateMHz = traits.Float(
        9.7946,
        label=u"Decay Rate \u0393 (MHz)",
        desc="decay rate/ natural line width of 2S1/2 -> 2P3/2")

    crossSectionSigmaPlus = traits.Float(
        1.6573163925E-13,
        label=u"cross section \u03C3 + (m^2)",
        desc=
        "resonant cross section 2S1/2 -> 2P3/2. Warning not accurate for 6Li yet"
    )

    scatteringLength = traits.Float(62.0, label="scattering length (a0)")
    IsatSigmaPlus = traits.Float(6.260021,
                                 width=10,
                                 label=u"Isat (mW/cm^2)",
                                 desc="I sat sigma + 2S1/2 -> 2P3/2")

    traits_view = traitsui.View(
        traitsui.VGroup(
            traitsui.Item("nameID", style="readonly"),
            traitsui.Item("massATU", style="readonly"),
            traitsui.Item("decayRateMHz", style="readonly"),
            traitsui.Item("crossSectionSigmaPlus", style="readonly"),
            traitsui.Item("scatteringLength"),
            traitsui.Item("IsatSigmaPlus", style="readonly")))
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether and where the workflow keeps its \
                            intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    surface_template = traits.Enum("fsaverage","fsaverage5","fsaverage6","fsaverage4","subject")
    test_name = traits.String('FS_one_sample_t_test')
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    base_dir = Directory(
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction"
    )
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Subjects

    datagrabber = traits.Instance(Data, ())
    TR = traits.Float(6.0)
    preproc_config = traits.File(desc="preproc config file")
    json_name = traits.String('preproc_metrics')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example 11
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    save_script_only = traits.Bool(False)
    sink_dir = Directory(mandatory=True, desc="Location to store results")
    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "PBSGraph",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    # Subjects
    datagrabber = traits.Instance(Data, ())
    name = traits.String('mean')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example 12
class OrNode(OpNode):
    """Parallel evaluation semantics

    Children of :class:`OrNode` will be evaluated in parallel, sparked
    in the order in which they were added as children of this node.

    Example:

    >>> @delayed()
    ... def foo(a): return 42
    >>> foo(42) | foo(24)
    <cloudmesh_workflow.workflow.OrNode object at ...>
    """

    # Implementation notes:
    #
    # Evaluation is done by sparking all children, then waiting for
    # all children.

    name = T.String('|')

    def start(self):
        for child in self.children_iter:
            child.start()

    def wait(self):
        for child in self.children_iter:
            child.wait()
        self.result.set_result(None)
class config(HasTraits):

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether and where the workflow keeps its \
                            intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)
    #Subjects
    subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
        desc="Subject id's. Bips expects dicoms to be organized by subject id's")
    dicom_dir_template = traits.String('%s/dicoms/')

    #Conversion Options
    embed_meta = traits.Bool(True)
    info_only = traits.Bool(True)
    no_moco = traits.Bool(False,desc="only convert non-moco files")
    use_heuristic = traits.Bool(False)
    heuristic_file = traits.File(desc="heuristic file")
Example 14
class InputData(T.HasTraits):
    dwi_images = nifti_file
    fa_file = nifti_file
    bvec_file = T.File(filter=['*.bvec'])
    bvec_orientation = T.String('IMG', minlen=3, maxlen=3)
    min_signal = T.Float(1)

    @T.on_trait_change('dwi_images')
    def update_files(self):
        dir, file = path.split(self.dwi_images)
        base = file.split(path.extsep, 1)[0]  # str.split; the Python 2 string module is not needed
        if self.fa_file == '':
            self.fa_file = path.join(dir, base + '_fa.nii.gz')
        if self.bvec_file == '':
            self.bvec_file = path.join(dir, base + '.bvec')

    def read_data(self):
        data_img = nib.load(self.dwi_images)
        affine = data_img.get_affine()
        voxel_size = data_img.get_header().get_zooms()
        voxel_size = voxel_size[:3]
        fa_img = nib.load(self.fa_file)
        assert data_img.shape[:-1] == fa_img.shape
        bvec, bval = read_bvec_file(self.bvec_file)
        data_ornt = nib.io_orientation(affine)
        if self.bvec_orientation != 'IMG':
            bvec = reorient_vectors(bvec, self.bvec_orientation, data_ornt)
        fa = fa_img.get_data()
        data = data_img.get_data()
        return data, voxel_size, affine, fa, bvec, bval
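
A hedged usage sketch (file names are placeholders): assigning dwi_images fires update_files(), which guesses the FA and b-vector paths next to it, and read_data() then loads everything needed for fitting.

# Hedged sketch; the paths below are placeholders, not files from the source.
inputs = InputData(dwi_images="subject01/dwi.nii.gz")
print(inputs.fa_file, inputs.bvec_file)        # filled in automatically by update_files()
data, voxel_size, affine, fa, bvec, bval = inputs.read_data()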
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its \
                            intermediary files. True to keep intermediary files.'
    )
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py',
                               'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4',
                              'fsaverage3',
                              'fsaverage5',
                              'fsaverage6',
                              'fsaverage',
                              'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5],
                               traits.Float(),
                               mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example 16
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its \
                            intermediary files. True to keep intermediary files.'
    )
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1", "ols", "flame12")
    save_script_only = traits.Bool(False)
    #Normalization
    brain_mask = traits.File(mandatory=True, desc='Brain Mask')
    name_of_project = traits.String("group_analysis", usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    #Correction:
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example 17
File: fsl.py Project: servoz/capsul
def edition_widget(engine, environment):
    ''' Edition GUI for FSL config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`
    '''
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        controller = widget.controller_widget.controller
        with widget.engine.settings as session:
            conf = session.config('fsl', widget.environment)
            values = {'config_id': 'fsl'}
            for k in ('directory', 'config', 'prefix'):
                value = getattr(controller, k)
                if value is traits.Undefined:
                    value = None
                values[k] = value
            if conf is None:
                session.new_config('fsl', widget.environment, values)
            else:
                for k, value in values.items():
                    if k == 'config_id':
                        continue
                    setattr(conf, k, values[k])

    controller = Controller()

    controller.add_trait(
        'directory',
        traits.Directory(traits.Undefined,
                         desc='Directory where FSL is installed'))
    controller.add_trait(
        'config',
        traits.File(traits.Undefined,
                    output=False,
                    desc='Parameter to specify the fsl.sh path'))
    controller.add_trait(
        'prefix',
        traits.String(traits.Undefined, desc='Prefix to add to FSL commands'))

    conf = engine.settings.select_configurations(environment, {'fsl': 'any'})
    if conf:
        fconf = conf.get('capsul.engine.module.fsl', {})
        controller.directory = fconf.get('directory', traits.Undefined)
        controller.config = fconf.get('config', traits.Undefined)
        controller.prefix = fconf.get('prefix', traits.Undefined)

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    widget.accept = types.MethodType(validate_config, widget)

    return widget
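
A hedged sketch of calling edition_widget directly (normally the Capsul settings editor does this); the engine construction and the 'global' environment name are assumptions, and a running QApplication is required:

# Hedged sketch; assumes a QApplication is already running.
from capsul.api import capsul_engine

engine = capsul_engine()                   # default local engine (assumption)
widget = edition_widget(engine, "global")  # "global": assumed default environment name
widget.show()                              # ScrollControllerWidget is a plain QWidget
# widget.accept()  -> writes the edited values back through validate_config()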
Example 18
class newDarkPictureDialog(traits.HasTraits):
    # pathSourceImages = traits.Directory( os.path.join("\\\\192.168.16.71","Humphry","Data","eagleLogs") )
    pathSourceImages = traits.Directory( eagleLogsFolder )
    pathNewDarkPicture = traits.File( defaultDarkPictureFilename, editor = traitsui.FileEditor(dialog_style='save') )
    cancelButton = traitsui.Action(name = 'Cancel', action = '_cancel')
    okButton = traitsui.Action(name = 'Calculate dark picture', action = '_ok')

    date = traits.String( time.strftime('%Y %m %d'), desc='Date' )
    camera = traits.String( "Andor1" )
    interval = traits.Float(0.003)
    filterCountLi = traits.Int(1)
    temperature = traits.Float(-40.0)
    autoFilename = traits.Button('Auto Filename')

    traits_view = traitsui.View(
        traitsui.Group(
            traitsui.Item('pathSourceImages'),
            traitsui.Group(
                traitsui.Item('date'),
                traitsui.Item('camera'),
                traitsui.Item('interval'),
                traitsui.Item('temperature'),
                traitsui.Item('autoFilename'),
                label='Auto Filename', show_border=True
            ),
            traitsui.Item('pathNewDarkPicture')
        ),
        buttons = [cancelButton, okButton],
        handler = newDarkPictureDialogHandler()
    )

    def _autoFilename_fired(self):
        filename = self.date + ' - dark ' + self.camera + ' - '
        filename += 'interval {} '.format(self.interval)
        filename += 'temperature {} '.format(self.temperature)
        filename = filename.replace('.','_')
        # filename += '.gz'
        filename += '.npy'
        path = os.path.join( defaultDarkPictureFilename, self.camera)
        if not os.path.exists( path ):
            os.mkdir( path )
        self.pathNewDarkPicture = os.path.join( path, filename )
Example 19
class Model(traits.HasTraits):

    model = traits.Instance(lmfit.Model)
    function = None  #python function for fitting
    guessFunction = None
    definitionString = traits.String()
    modelName = traits.String("")

    def __init__(self, function, **traitsDict):
        super(Model, self).__init__(**traitsDict)
        self.function = function
        self.model = lmfit.Model(function)
        self.parameters = self.model.make_params()
        try:
            self.definitionString = inspect.getsource(function)
        except IOError as e:
            self.definitionString = str(e)  # e.message is Python 2 only
        if hasattr(fittingFunctions, function.__name__ + "_guess"):
            self.guessFunction = getattr(fittingFunctions,
                                         function.__name__ + "_guess")
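
A hedged sketch of wrapping a plain fit function with Model; the gaussian below is illustrative (not taken from fittingFunctions), and the surrounding module, including its fittingFunctions import, is assumed to be loaded:

# Illustrative only; `gaussian` stands in for a function from fittingFunctions.
import numpy as np

def gaussian(x, amplitude, center, sigma):
    return amplitude * np.exp(-(x - center) ** 2 / (2.0 * sigma ** 2))

m = Model(gaussian, modelName="gaussian")
print(m.parameters)          # lmfit Parameters built by make_params()
print(m.definitionString)    # source of the wrapped function, captured via inspect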
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
                                                     Set this value to None if you don't want fieldmap distortion correction"
    )
    json_sink = Directory(mandatory=False, desc="Location to store json_files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Subjects

    subjects = traits.List(
        traits.Str,
        mandatory=True,
        usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                    Freesurfer directory. For simplicity, the subject id's should \
                                    also match with the location of individual functional files."
    )
    func_template = traits.String('%s/functional.nii.gz')
    run_datagrabber_without_submitting = Bool(True, usedefault=True)
    # Motion Correction

    do_slicetiming = Bool(True,
                          usedefault=True,
                          desc="Perform slice timing correction")
    SliceOrder = traits.List(traits.Int)
    TR = traits.Float(mandatory=True, desc="TR of functional")
    save_script_only = traits.Bool(False)

    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        subs = self.subjects

        for s in subs:
            if not os.path.exists(
                    os.path.join(self.base_dir, self.func_template % s)):
                print "ERROR", os.path.join(self.base_dir, self.func_template %
                                            s), "does NOT exist!"
                break
            else:
                print os.path.join(self.base_dir,
                                   self.func_template % s), "exists!"
Example 21
class OrNode(OpNode):

    name = T.String('|')

    def start(self):
        for child in self.children_iter:
            child.start()

    def wait(self):
        for child in self.children_iter:
            child.wait()
Example 22
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Execution
    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its \
                            intermediary files. True to keep intermediary files.')
    timeout = traits.Float(14.0)
    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                          desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    fwhm=traits.List(traits.Float())
    inputs_template = traits.String('%s/preproc/output/fwhm_%s/*.nii.gz')
    meanfunc_template = traits.String('%s/preproc/mean/*_mean.nii.gz')
    fsl_mat_template = traits.String('%s/preproc/bbreg/*.mat')

    #Normalization
    norm_template = traits.File(mandatory=True,desc='Template to warp to')
    do_segment = traits.Bool(False)
    do_anatomical_only = traits.Bool(True)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
    # Buttons
    check_func_datagrabber = Button("Check")
class Option(traits.HasTraits):

    name = traits.String(
        desc="key from options dictionary. describes the option")
    value = traits.Any()
    traits_view = traitsui.View(
        traitsui.HGroup(
            traitsui.Item("name",
                          style="readonly",
                          springy=True,
                          show_label=False),
            traitsui.Item("value", show_label=False, springy=True)))
Example 24
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction"
    )
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "PBSGraph",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its \
                            intermediary files. True to keep intermediary files.'
    )
    # Subjects

    datagrabber = traits.Instance(Data, ())
    TR = traits.Float(6.0)
    preproc_config = traits.File(desc="preproc config file")
    json_name = traits.String('preproc_metrics')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example 25
class SBFindOptParamInputSpec(CommandLineInputSpec):
    freq_loc = File(exists=True, desc='Input freq filename',
                    argstr="%s", position=1,
                    copyfile=False, mandatory=True)
    reliability_mask_loc = File(exists=True,
                                desc='Phase voxels to leave out of data fidelity term',
                                argstr="%s", position=3,
                                copyfile=False, mandatory=False)
    mask_loc = File(exists=True, desc='Mask (3D)', argstr="%s",
                    position=2, copyfile=False, mandatory=True)
    result_mat_file_loc = File(desc="Output mat filename where optimal param is stored",
                               argstr='%s', position=4, mandatory=True)
    quit_matlab = traits.String(';quit;"', desc='needed to quit matlab',
                                argstr='%s', position=7, mandatory=False,
                                usedefault=True)
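
For context, a hedged sketch of the standard nipype pattern this input spec plugs into; the interface class, output spec, and executable name below are assumptions, not from the source. CommandLine assembles the command from the argstr/position metadata declared above.

# Hedged sketch; class and command names are illustrative assumptions.
from nipype.interfaces.base import CommandLine, TraitedSpec, File

class SBFindOptParamOutputSpec(TraitedSpec):
    result_mat_file = File(desc="mat file holding the optimal parameter")

class SBFindOptParam(CommandLine):
    _cmd = "sb_find_opt_param"            # hypothetical executable name
    input_spec = SBFindOptParamInputSpec
    output_spec = SBFindOptParamOutputSpec

# node = SBFindOptParam(freq_loc="freq.nii", mask_loc="mask.nii",
#                       result_mat_file_loc="opt.mat")
# node.cmdline  -> full command built from the argstr/position metadata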
Example 26
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    save_script_only = traits.Bool(False)
    sink_dir = Directory(mandatory=True, desc="Location to store results")

    # Subjects
    datagrabber = traits.Instance(Data, ())
    name = traits.String('mean')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example 27
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(os.path.abspath('.'),mandatory=True, desc="Location where the BIP will store the results")
    field_dir = Directory(desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")

    # Subjects

    subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    func_template = traits.String('%s/functional.nii.gz')
    run_datagrabber_without_submitting = traits.Bool(desc="Run the datagrabber without \
    submitting to the cluster")
    timepoints_to_remove = traits.Int(0,usedefault=True)

    do_slicetiming = Bool(True, usedefault=True, desc="Perform slice timing correction")
    SliceOrder = traits.List(traits.Int)
    order = traits.Enum('motion_slicetime','slicetime_motion',usedefault=True)
    TR = traits.Float(mandatory=True, desc = "TR of functional")
    motion_correct_node = traits.Enum('nipy','fsl','spm','afni',
        desc="motion correction algorithm to use",
        usedefault=True,)

    csf_prob = traits.File(desc='CSF_prob_map') 
    grey_prob = traits.File(desc='grey_prob_map')
    white_prob = traits.File(desc='white_prob_map')
    # Artifact Detection

    norm_thresh = traits.Float(1, min=0, usedefault=True, desc="norm thresh for art")
    z_thresh = traits.Float(3, min=0, usedefault=True, desc="z thresh for art")

    # Smoothing
    fwhm = traits.Float(6.0,usedefault=True)
    save_script_only = traits.Bool(False)
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        subs = self.subjects

        for s in subs:
            if not os.path.exists(os.path.join(self.base_dir,self.func_template % s)):
                print("ERROR", os.path.join(self.base_dir,self.func_template % s), "does NOT exist!")
                break
            else:
                print(os.path.join(self.base_dir,self.func_template % s), "exists!")
Example 28
class ImHistMatchInputSpec(CommandLineInputSpec):

    in_file = File(exists=True, desc='Input filename',
                   argstr='%s', position=1,
                   copyfile=False, mandatory=True)
    ref = File(exists=True, desc='Reference filename', argstr='%s',
               position=2, copyfile=False, mandatory=True)
    result_filename = File(desc="Output LFS filename", argstr='%s', position=3,
                           mandatory=True)
    quit_matlab = traits.String(';quit;"', desc='needed to quit matlab',
                                argstr='%s', position=4, mandatory=False,
                                usedefault=True)
Example 29
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Subjects
    subjects = traits.List(
        traits.Str,
        mandatory=True,
        usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files."
    )
    fwhm = traits.List(traits.Float())
    inputs_template = traits.String('%s/preproc/output/fwhm_%s/*.nii.gz')
    meanfunc_template = traits.String('%s/preproc/mean/*_mean.nii.gz')
    fsl_mat_template = traits.String('%s/preproc/bbreg/*.mat')

    #Normalization
    norm_template = traits.File(mandatory=True, desc='Template to warp to')
    do_segment = traits.Bool(False)
    do_anatomical_only = traits.Bool(True)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
    # Buttons
    check_func_datagrabber = Button("Check")
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())
    surface_template = traits.Enum("fsaverage","fsaverage5","fsaverage6","fsaverage4","subject")
    test_name = traits.String('FS_one_sample_t_test')
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()