def test_FEAT_inputs():
    """Yield-style checks that FEAT's input traits carry the expected metadata."""
    expected = dict(
        args=dict(argstr='%s', ),
        environ=dict(nohash=True, usedefault=True, ),
        fsf_file=dict(argstr='%s', mandatory=True, position=0, ),
        ignore_exception=dict(nohash=True, usedefault=True, ),
        output_type=dict(),
        terminal_output=dict(mandatory=True, nohash=True, ),
    )
    inputs = FEAT.input_spec()
    for trait_name, spec in expected.items():
        # Hoist the trait lookup out of the metadata loop.
        trait = inputs.traits()[trait_name]
        for meta_name, meta_value in spec.items():
            yield assert_equal, getattr(trait, meta_name), meta_value
def test_FEAT_outputs():
    """Yield-style checks that FEAT's output traits carry the expected metadata."""
    expected = dict(feat_dir=dict(), )
    outputs = FEAT.output_spec()
    for trait_name, spec in expected.items():
        trait = outputs.traits()[trait_name]
        for meta_name, meta_value in spec.items():
            yield assert_equal, getattr(trait, meta_name), meta_value
def test_FEAT_inputs():
    """Yield-style checks that FEAT's input traits carry the expected metadata.

    NOTE(review): this redefines ``test_FEAT_inputs`` from earlier in the file
    with slightly different expectations (``terminal_output`` has no
    ``mandatory=True`` here); only this later definition is visible at module
    level — confirm which version is intended.
    """
    expected = dict(
        args=dict(argstr='%s', ),
        environ=dict(nohash=True, usedefault=True, ),
        fsf_file=dict(argstr='%s', mandatory=True, position=0, ),
        ignore_exception=dict(nohash=True, usedefault=True, ),
        output_type=dict(),
        terminal_output=dict(nohash=True, ),
    )
    inputs = FEAT.input_spec()
    for trait_name, spec in expected.items():
        trait = inputs.traits()[trait_name]
        for meta_name, meta_value in spec.items():
            yield assert_equal, getattr(trait, meta_name), meta_value
# Configure the FEAT-specification node (``feat_node`` and the other source
# nodes are defined earlier in this script), wire it into the meta-workflow,
# add the FEAT runner and a DataSink, then execute the whole graph.
feat_settings = [
    ('bet', True),
    ('prewhitening', False),
    ('motion_regression', False),
    ('thresholding', 'uncorrected'),
    ('hrf', 'doublegamma'),
    ('open_feat_html', False),
    ('highpass', 100),
    ('contrasts', 'single-trial'),
]
for attr, value in feat_settings:
    setattr(feat_node.inputs, attr, value)

# Feed functional data, the model specification, and naming info into the
# FEAT-specification node (list-of-edges form of Workflow.connect).
meta_wf.connect([
    (select_files, feat_node, [('func', 'func_file'),
                               ('func_mask', 'mask')]),
    (modelgen_wf, feat_node, [('outputspec.session_info', 'session_info')]),
    (input_node, feat_node, [('sub_id', 'output_dirname')]),
])

# Node that actually executes FEAT on the generated design.
run_feat_node = Node(FEAT(), name='run_feat')
meta_wf.connect(feat_node, 'feat_dir', run_feat_node, 'fsf_file')

# Sink results without iterable-based sub-directories.
datasink = Node(interface=DataSink(), name='datasink')
datasink.inputs.parameterization = False
datasink.inputs.base_directory = out_dir
meta_wf.connect([
    (feat_node, datasink, [('confound_file', 'confound_file'),
                           ('ev_files', 'ev_files')]),
    (run_feat_node, datasink, [('feat_dir', 'firstlevel_FEAT')]),
    (concat_iterables_node, datasink, [('out', 'container')]),
])

meta_wf.base_dir = op.join(out_dir, 'workingdir')
meta_wf.run(plugin='MultiProc', plugin_args={'n_procs': 8})
def create_firstlevel_workflow_FEAT(name='level1feat'):
    """Build a first-level FSL FEAT workflow.

    Wires an ``inputspec`` IdentityInterface through a model-generation
    sub-workflow and a ``Level1Design``/``FEAT`` MapNode pair, renames the
    resulting FEAT directories per task, and exposes ``fsf_file``,
    ``ev_file`` and ``feat_dir`` on ``outputspec``.
    """
    input_fields = [
        'events_file', 'single_trial', 'sort_by_onset', 'exclude',
        'func_file', 'TR', 'confound_file', 'which_confounds',
        'extend_motion_pars', 'model_serial_correlations', 'hrf_base',
        'hp_filter', 'contrasts',
    ]
    input_node = pe.Node(IdentityInterface(fields=input_fields),
                         name='inputspec')
    output_node = pe.Node(
        IdentityInterface(fields=['fsf_file', 'ev_file', 'feat_dir']),
        name='outputspec')

    # Defaults below (bases, interscan_interval, serial correlations) are
    # overridden at runtime by the connections from input_node.
    level1_design = pe.MapNode(
        interface=Level1Design(bases={'dgamma': {'derivs': True}},
                               interscan_interval=2.0,
                               model_serial_correlations=True),
        iterfield=['contrasts', 'session_info'],
        name='level1_design')
    feat = pe.MapNode(interface=FEAT(), iterfield=['fsf_file'], name='FEAT')
    extract_task = pe.MapNode(interface=Extract_task,
                              iterfield=['in_file'],
                              name='extract_task')
    rename_feat_dir = pe.MapNode(interface=Rename_feat_dir,
                                 iterfield=['feat_dir', 'task'],
                                 name='rename_feat_dir')

    firstlevel_wf = pe.Workflow(name=name)
    modelgen_wf = create_modelgen_workflow()

    # List-of-edges form of Workflow.connect: one tuple per node pair.
    firstlevel_wf.connect([
        (input_node, modelgen_wf,
         [('events_file', 'inputspec.events_file'),
          ('func_file', 'inputspec.func_file'),
          ('TR', 'inputspec.TR'),
          ('single_trial', 'inputspec.single_trial'),
          ('sort_by_onset', 'inputspec.sort_by_onset'),
          ('extend_motion_pars', 'inputspec.extend_motion_pars'),
          ('exclude', 'inputspec.exclude'),
          ('confound_file', 'inputspec.confound_file'),
          ('which_confounds', 'inputspec.which_confounds'),
          ('hp_filter', 'inputspec.hp_filter')]),
        (input_node, level1_design,
         [('TR', 'interscan_interval'),
          ('model_serial_correlations', 'model_serial_correlations'),
          ('hrf_base', 'bases'),
          ('contrasts', 'contrasts')]),
        (modelgen_wf, level1_design,
         [('outputspec.session_info', 'session_info')]),
        (level1_design, feat, [('fsf_files', 'fsf_file')]),
        (level1_design, output_node, [('fsf_files', 'fsf_file'),
                                      ('ev_files', 'ev_file')]),
        (input_node, extract_task, [('func_file', 'in_file')]),
        (extract_task, rename_feat_dir, [('task_name', 'task')]),
        (feat, rename_feat_dir, [('feat_dir', 'feat_dir')]),
        (rename_feat_dir, output_node, [('feat_dir', 'feat_dir')]),
    ])
    return firstlevel_wf