Example #1
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.')
    timeout = traits.Float(14.0)
    # Subjects
    #subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
    #    desc="Subject id's. Note: These MUST match the subject id's in the \
    #                            Freesurfer directory. For simplicity, the subject id's should \
    #                            also match with the location of individual functional files.")
    #fwhm=traits.List(traits.Float())
    #copes_template = traits.String('%s/preproc/output/fwhm_%s/cope*.nii.gz')
    #varcopes_template = traits.String('%s/preproc/output/fwhm_%s/varcope*.nii.gz')
    #contrasts = traits.List(traits.Str,desc="contrasts")

    datagrabber = traits.Instance(Data, ())

    # Regression
    design_csv = traits.File(desc="design .csv file")
    reg_contrasts = traits.Code(desc="function named reg_contrasts which takes in 0 args and returns contrasts")

    #Normalization
    norm_template = traits.File(mandatory=True,desc='Template of files')

    #Correction:
    run_correction = traits.Bool(False)
    z_threshold = traits.Float(2.3)
    connectivity = traits.Int(25)
    do_randomize = traits.Bool(False)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example #2
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")

    # Directories
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Subjects

    #subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
    #    desc="Subject id's. Note: These MUST match the subject id's in the \
    #                            Freesurfer directory. For simplicity, the subject id's should \
    #                            also match with the location of individual functional files.")

    datagrabber = traits.Instance(Data, ())
    # First Level

    subjectinfo = traits.Code()
    contrasts = traits.Code()
    interscan_interval = traits.Float()
    film_threshold = traits.Float()
    input_units = traits.Enum('scans', 'secs')
    is_sparse = traits.Bool(False)
    model_hrf = traits.Bool(True)
    stimuli_as_impulses = traits.Bool(True)
    use_temporal_deriv = traits.Bool(True)
    volumes_in_cluster = traits.Int(1)
    ta = traits.Float()
    tr = traits.Float()
    hpcutoff = traits.Float()
    scan_onset = traits.Int(0)
    scale_regressors = traits.Bool(True)
    # name of basis function and options, e.g. {'dgamma': {'derivs': True}}
    bases = traits.Dict({'dgamma': {'derivs': False}}, use_default=True)

    # preprocessing info
    preproc_config = traits.File(desc="preproc config file")
    use_compcor = traits.Bool(desc="use noise components from CompCor")
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
    save_script_only = traits.Bool(False)
Example #3
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    save_script_only = traits.Bool(False)

    # Subjects
    #subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
    #    desc="Subject id's. Note: These MUST match the subject id's in the \
    #                            Freesurfer directory. For simplicity, the subject id's should \
    #                            also match with the location of individual functional files.")
    #fwhm=traits.List(traits.Float())
    #copes_template = traits.String('%s/preproc/output/fwhm_%s/cope*.nii.gz')
    #varcopes_template = traits.String('%s/preproc/output/fwhm_%s/varcope*.nii.gz')
    #contrasts = traits.List(traits.Str,desc="contrasts")

    datagrabber = traits.Instance(Data, ())

    # Regression
    design_csv = traits.File(desc="design .csv file")
    reg_contrasts = traits.Code(
        desc="function named reg_contrasts which takes in 0 args and returns contrasts")
    run_mode = traits.Enum("flame1", "ols", "flame12")
    #Normalization
    norm_template = traits.File(desc='Template of files')
    use_mask = traits.Bool(False)
    mask_file = traits.File()
    #Correction:
    run_correction = traits.Bool(False)
    p_threshold = traits.Float(0.05)
    z_threshold = traits.Float(2.3)
    connectivity = traits.Int(26)
    do_randomize = traits.Bool(False)
    num_iterations = traits.Int(5000)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example #4
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.')
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    surface_template = traits.Enum("fsaverage","fsaverage5","fsaverage6","fsaverage4","subject")
    test_name = traits.String('FS_one_sample_t_test')
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example #5
class AutoRefreshDialog(traits.HasTraits):

    minutes = traits.Float(1.0)
    autoRefreshBool = traits.Bool()

    emailAlertBool = traits.Bool(False)
    soundAlertBool = traits.Bool(False)
    linesOfDataFrame = traits.Range(1, 10)
    alertCode = traits.Code(
        DEFAULT_ALERT_CODE,
        desc="python code for finding alert worthy elements")

    basicGroup = traitsui.Group("minutes", "autoRefreshBool")
    alertGroup = traitsui.VGroup(
        traitsui.HGroup(traitsui.Item("emailAlertBool"),
                        traitsui.Item("soundAlertBool")),
        traitsui.Item("linesOfDataFrame",
                      visible_when="emailAlertBool or soundAlertBool"),
        traitsui.Item("alertCode",
                      visible_when="emailAlertBool or soundAlertBool"))

    traits_view = traitsui.View(traitsui.VGroup(basicGroup, alertGroup),
                                title="auto refresh",
                                buttons=[OKButton],
                                kind='livemodal',
                                resizable=True)
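A hedged sketch of how such a dialog might be opened, assuming DEFAULT_ALERT_CODE and OKButton are defined as in the surrounding module and a TraitsUI GUI backend (e.g. Qt) is installed:

dlg = AutoRefreshDialog(minutes=2.0, autoRefreshBool=True)
accepted = dlg.configure_traits()   # opens the 'livemodal' traits_view defined above
if accepted:                        # True when the user closes the dialog with OK
    print(dlg.emailAlertBool, dlg.alertCode)
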
Example #6
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")

    # Subjects
    subjects = traits.List(traits.Str, mandatory=True, usedefault=True,
                          desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    fwhm=traits.List(traits.Float())
    inputs_template = traits.String('%s/preproc/output/fwhm_%s/*.nii.gz')
    meanfunc_template = traits.String('%s/preproc/mean/*_mean.nii.gz')
    fsl_mat_template = traits.String('%s/preproc/bbreg/*.mat')
    unwarped_brain_template = traits.String('%s/smri/unwarped_brain/*.nii*')
    affine_transformation_template = traits.String('%s/smri/affine_transformation/*.nii*')
    warp_field_template = traits.String('%s/smri/warped_field/*.nii*')

    
    #Normalization
    standard_transform_template = traits.File(mandatory=True, desc='Standard template to warp to')
    standard_norm_template = traits.File()
    standard_warp_field_template = traits.File()
    standard_affine_transformation_template = traits.File()
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example #7
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")

    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1","ols","flame12")
    save_script_only = traits.Bool(False)
    #Normalization
    brain_mask = traits.File(mandatory=True,desc='Brain Mask')
    name_of_project = traits.String("group_analysis",usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    #Correction:
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example #8
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    save_script_only = traits.Bool(False)
    sink_dir = Directory(mandatory=True, desc="Location to store results")
    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "PBSGraph",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    # Subjects
    datagrabber = traits.Instance(Data, ())
    name = traits.String('mean')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #9
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.')
    # Subjects

    subjects= traits.List(traits.Str, mandatory=True, usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files.")
    # Preprocessing info
    preproc_config = traits.File(desc="preproc json file")

    #Advanced
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
Example #10
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    sink_dir = Directory(mandatory=True, desc="Location to store results")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "PBSGraph","MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    # Subjects
    interpolation = traits.Enum('trilinear','nearestneighbour','sinc',usedefault=True)
    name = traits.String('flirt_output',desc='name of folder to store flirt mats')
    datagrabber_create = traits.Instance(Data, ())
    datagrabber_apply = traits.Instance(Data, ())
    create_transform = traits.Bool(True)
    apply_transform = traits.Bool(False)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #11
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")

    # Execution

    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
        usedefault=True,
        desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
        usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.')
    timeout = traits.Float(14.0)
    subjects = traits.List(desc="subjects")
    split_files = traits.List(traits.File(), desc="list of split files")
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example #12
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(os.environ['SUBJECTS_DIR'],
                         desc='Freesurfer subjects dir')
    save_script_only = traits.Bool(False)

    # Subjects
    subjects = traits.List(
        traits.Str,
        mandatory=True,
        usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files."
    )
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #13
def getNode(_type, tr, config):
    from bips.workflows.flexible_datagrabber import Data, DataBase
    if _type == type(traits.Int()):
        col_type = colander.SchemaNode(colander.Int(),
                                       name=tr, description=config.trait(tr).desc)
    elif _type == type(traits.Float()):
        col_type = colander.SchemaNode(colander.Decimal(), name=tr)
    elif _type == type(traits.String()) or _type == type(traits.Str()):
        col_type = colander.SchemaNode(colander.String(), name=tr)
    elif _type == type(traits.Enum('')):
        values = config.trait(tr).trait_type.values
        the_values = []
        for v in values:
            the_values.append((v, v))
        col_type = colander.SchemaNode(
            deform.Set(),
            widget=deform.widget.SelectWidget(values=the_values),
            name=tr)
    elif _type == type(traits.Bool()):
        col_type = colander.SchemaNode(colander.Boolean(),
                                       widget=deform.widget.CheckboxWidget(),
                                       name=tr)
    elif _type == type(traits.Code()):
        col_type = colander.SchemaNode(colander.String(), name=tr,
                                       widget=deform.widget.TextAreaWidget(cols=100, rows=20))
    elif _type == type(traits.Instance(Data, ())):
        from bips.workflows.flexible_datagrabber import create_datagrabber_html_view
        col_type = create_datagrabber_html_view()
    elif _type == type(traits.List()):
        col_type = get_list(_type, tr, config)
    else:
        print("type: ", _type, "not found!")
        col_type = colander.SchemaNode(colander.String(), name=tr)
    return col_type
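A sketch of how getNode might be driven from a workflow config to build a complete colander schema; build_schema and the iteration over editable_traits() are assumptions for illustration, not part of the original module:

def build_schema(cfg):
    """Map every editable trait of a BIPS config onto a colander SchemaNode."""
    schema = colander.Schema()                       # mapping schema to collect the nodes
    for name in cfg.editable_traits():
        trait_type = type(cfg.trait(name).trait_type)
        schema.add(getNode(trait_type, name, cfg))   # dispatch on the trait's type as above
    return schema
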
Example #14
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py',
                               'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4',
                              'fsaverage3',
                              'fsaverage5',
                              'fsaverage6',
                              'fsaverage',
                              'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5],
                               traits.Float(),
                               mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example #15
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    base_dir = Directory(
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction"
    )
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Subjects

    datagrabber = traits.Instance(Data, ())
    TR = traits.Float(6.0)
    preproc_config = traits.File(desc="preproc config file")
    json_name = traits.String('preproc_metrics')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #16
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction"
    )
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    json_sink = Directory(mandatory=False, desc="Location to store json_files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "PBSGraph",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.'
    )
    # Subjects

    subjects = traits.List(
        traits.Str,
        mandatory=True,
        usedefault=True,
        desc=
        "Subject id's. These subjects must match the ones that have been run in your preproc config"
    )

    preproc_config = traits.File(desc="preproc config file")
    debug = traits.Bool(True)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #17
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.'
    )
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    run_mode = traits.Enum("flame1", "ols", "flame12")
    save_script_only = traits.Bool(False)
    #Normalization
    brain_mask = traits.File(mandatory=True, desc='Brain Mask')
    name_of_project = traits.String("group_analysis", usedefault=True)
    do_randomize = traits.Bool(True)
    num_iterations = traits.Int(5000)

    #Correction:
    run_correction = traits.Bool(True)
    z_threshold = traits.Float(2.3)
    p_threshold = traits.Float(0.05)
    connectivity = traits.Int(26)

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example #18
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.'
    )
    timeout = traits.Float(14.0)
    datagrabber = traits.Instance(Data, ())
    projection_stem = traits.Str('-projfrac-avg 0 1 0.1',
                                 desc='how to project data onto the surface')
    out_type = traits.Enum('mat', 'hdf5', desc='mat or hdf5')
    hdf5_package = traits.Enum('h5py',
                               'pytables',
                               desc='which hdf5 package to use')
    target_surf = traits.Enum('fsaverage4',
                              'fsaverage3',
                              'fsaverage5',
                              'fsaverage6',
                              'fsaverage',
                              'subject',
                              desc='which average surface to map to')
    surface_fwhm = traits.List([5],
                               traits.Float(),
                               mandatory=True,
                               usedefault=True,
                               desc="How much to smooth on target surface")
    roiname = traits.String('amygdala')
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example #19
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True, desc="Location of the Nipype working directory")
    base_dir = Directory(os.path.abspath('.'),mandatory=True, desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True, desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False, desc="Location to store crash files")

    # Execution
    run_using_plugin = Bool(False, usedefault=True, desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS", "MultiProc", "SGE", "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                                                      usedefault=True, desc='Plugin arguments.')
    test_mode = Bool(False, mandatory=False, usedefault=True,
                     desc='Affects whether and where the workflow keeps its '
                          'intermediate files. True to keep intermediate files.')
    timeout = traits.Float(14.0)
    # Subjects

    datagrabber = traits.Instance(Data, ())

    #Normalization
    norm_template = traits.File(mandatory=True,desc='Template to warp to')
    use_nearest = traits.Bool(False,desc="use nearest neighbor interpolation")
    do_segment = traits.Bool(True)
    surf_dir = traits.Directory()
    moving_images_4D = traits.Bool(True, usedefault=True, desc="True if your moving image inputs \
                                         are time series images, False if they are 3-dimensional")
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        subs = self.subjects
        template = [self.inputs_template,
                    self.meanfunc_template,
                    self.fsl_mat_template,
                    self.unwarped_brain_template,
                    self.affine_transformation_template,
                    self.warp_field_template]
        for s in subs:
            for t in template:
                try:
                    temp = glob(os.path.join(self.base_dir,t%s))
                except TypeError:
                    temp = []
                    for f in self.fwhm:
                        temp.append(glob(os.path.join(self.base_dir,t%(s,f))))
                print(temp)
Example #20
class config(HasTraits):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    sink_dir = Directory(mandatory=False,
                         desc="Location to store BIPS results")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)
    # Execution

    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "PBSGraph",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q many"},
                              usedefault=True,
                              desc='Plugin arguments.')
    # Subjects
    datagrabber = traits.Instance(Data, ())
    # Motion Correction

    do_slicetiming = Bool(True,
                          usedefault=True,
                          desc="Perform slice timing correction")
    SliceOrder = traits.List(traits.Int)
    TR = traits.Float(1.0, mandatory=True, desc="TR of functional")
    motion_correct_node = traits.Enum(
        'nipy',
        'fsl',
        'spm',
        'afni',
        desc="motion correction algorithm to use",
        usedefault=True,
    )
    use_metadata = traits.Bool(True)
    order = traits.Enum('motion_slicetime',
                        'slicetime_motion',
                        use_default=True)
    loops = traits.List([5], traits.Int(5), usedefault=True)
    #between_loops = traits.Either("None",traits.List([5]),usedefault=True)
    speedup = traits.List([5], traits.Int(5), usedefault=True)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #21
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())

    # Regression
    run_one_sample_T_test = traits.Bool(True)
    run_regression = traits.Bool()
    design_csv = traits.File(desc="design .csv file")
    reg_contrasts = traits.Code(
        desc="function named reg_contrasts which takes in 0 args and returns contrasts")
    use_regressors = traits.Bool()
    estimation_method = traits.Enum('Classical', 'Bayesian', 'Bayesian2')
    include_intercept = traits.Bool(True)
    #Normalization

    norm_template = traits.File(desc='Template of files')
    use_mask = traits.Bool(False)
    mask_file = traits.File(desc='already binarized mask file to use')

    #Correction:
    p_threshold = traits.Float(0.05)
    height_threshold = traits.Float(0.05)
    min_cluster_size = traits.Int(25)
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()

    # Buttons
    check_func_datagrabber = Button("Check")
Example #22
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    save_script_only = traits.Bool(False)

    # Subjects

    datagrabber = traits.Instance(Data, ())
    dtype = traits.Enum('float', 'short', 'bool', 'int')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #23
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    sink_dir = Directory(os.path.abspath('.'),
                         mandatory=True,
                         desc="Location where the BIP will store the results")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())

    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example #24
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    save_script_only = traits.Bool(False)
    sink_dir = Directory(mandatory=True, desc="Location to store results")

    # Subjects
    datagrabber = traits.Instance(Data, ())
    name = traits.String('mean')
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #25
class config(HasTraits):
    uuid = traits.Str(desc="UUID")

    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(os.environ['SUBJECTS_DIR'],
                         desc='Freesurfer subjects dir')
    save_script_only = traits.Bool(False)

    # Execution
    run_using_plugin = Bool(
        False,
        usedefault=True,
        desc="True to run pipeline with plugin, False to run serially")
    plugin = traits.Enum("PBS",
                         "MultiProc",
                         "SGE",
                         "Condor",
                         usedefault=True,
                         desc="plugin to use, if run_using_plugin=True")
    plugin_args = traits.Dict({"qsub_args": "-q max10"},
                              usedefault=True,
                              desc='Plugin arguments.')
    test_mode = Bool(
        False,
        mandatory=False,
        usedefault=True,
        desc='Affects whether and where the workflow keeps its '
             'intermediate files. True to keep intermediate files.'
    )
    # Subjects
    subjects = traits.List(
        traits.Str,
        mandatory=True,
        usedefault=True,
        desc="Subject id's. Note: These MUST match the subject id's in the \
                                Freesurfer directory. For simplicity, the subject id's should \
                                also match with the location of individual functional files."
    )
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
Example #26
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")

    # Directories
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    save_script_only = traits.Bool(False)

    # Subjects
    datagrabber = traits.Instance(Data, ())

    # Stimulus Motion
    subjectinfo = traits.Code()
    is_sparse = traits.Bool(False)
Example #27
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")

    # Directories
    base_dir = Directory(
        os.path.abspath('.'),
        mandatory=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")

    # Subjects

    datagrabber = traits.Instance(Data, ())

    #Normalization
    norm_template = traits.File(mandatory=True, desc='Template to warp to')
    use_nearest = traits.Bool(False, desc="use nearest neighbor interpolation")
    do_segment = traits.Bool(True)
    surf_dir = traits.Directory()
    moving_images_4D = traits.Bool(True,
                                   usedefault=True,
                                   desc="True if your moving image inputs \
                                         are time series images, False if they are 3-dimensional"
                                   )
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
    # Buttons
    check_func_datagrabber = Button("Check")

    def _check_func_datagrabber_fired(self):
        subs = self.subjects
        template = [
            self.inputs_template, self.meanfunc_template,
            self.fsl_mat_template, self.unwarped_brain_template,
            self.affine_transformation_template, self.warp_field_template
        ]
        for s in subs:
            for t in template:
                try:
                    temp = glob(os.path.join(self.base_dir, t % s))
                except TypeError:
                    temp = []
                    for f in self.fwhm:
                        temp.append(
                            glob(os.path.join(self.base_dir, t % (s, f))))
                print(temp)
Example #28
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc="Workflow Description")
    # Directories
    sink_dir = Directory(os.path.abspath('.'), mandatory=True, desc="Location where the BIP will store the results")
    surf_dir = Directory(mandatory=True, desc= "Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    datagrabber = traits.Instance(Data, ())
    surface_template = traits.Enum("fsaverage","fsaverage5","fsaverage6","fsaverage4","subject")
    test_name = traits.String('FS_one_sample_t_test')
    # First Level
    #advanced_options
    use_advanced_options = Bool(False)
    advanced_options = traits.Code()
Example #29
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    base_dir = Directory(
        exists=True,
        desc='Base directory of data. (Should be subject-independent)')
    sink_dir = Directory(mandatory=True,
                         desc="Location where the BIP will store the results")
    field_dir = Directory(
        exists=True,
        desc="Base directory of field-map data (Should be subject-independent) \
                                                 Set this value to None if you don't want fieldmap distortion correction"
    )
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    json_sink = Directory(mandatory=False, desc="Location to store json_files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")

    # Subjects

    subjects = traits.List(
        traits.Str,
        mandatory=True,
        usedefault=True,
        desc=
        "Subject id's. These subjects must match the ones that have been run in your preproc config"
    )

    preproc_config = traits.File(desc="preproc config file")
    debug = traits.Bool(True)

    #Preprocessing Info
    use_custom_ROI_list_file = Bool(
        False,
        usedefault=True,
        desc=
        "True to limit the produced TSNR table to a more selective list of ROIs"
    )
    custom_ROI_list_file = traits.File(
        desc="Enter the full path to your customized FreeSurferColorLUT.txt")

    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()
    save_script_only = traits.Bool(False)
Example #30
class config(BaseWorkflowConfig):
    uuid = traits.Str(desc="UUID")
    desc = traits.Str(desc='Workflow description')
    # Directories
    working_dir = Directory(mandatory=True,
                            desc="Location of the Nipype working directory")
    crash_dir = Directory(mandatory=False,
                          desc="Location to store crash files")
    surf_dir = Directory(mandatory=True, desc="Freesurfer subjects directory")
    save_script_only = traits.Bool(False)

    # Subjects
    interpolation = traits.Enum('trilin', 'nearest', usedefault=True)
    datagrabber = traits.Instance(Data, ())
    # Advanced Options
    use_advanced_options = traits.Bool()
    advanced_script = traits.Code()