Example #1
l1analysis = Workflow(name='l1analysis')
# connect the 1st-level analysis components
l1analysis.connect(l1model, 'session_info', l1design, 'session_info')
l1analysis.connect(l1design, 'spm_mat_file', l1estimate, 'spm_mat_file')
l1analysis.connect(l1estimate, 'spm_mat_file', l1contrasts, 'spm_mat_file')
l1analysis.connect(l1estimate, 'beta_images', l1contrasts, 'beta_images')
l1analysis.connect(l1estimate, 'residual_image', l1contrasts, 'residual_image')
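# For reference, the same five connections can be written with the list-based
# connect syntax used in the later examples; a minimal equivalent sketch
# (kept commented out so the ports are not connected twice):
# l1analysis.connect([(l1model, l1design, [('session_info', 'session_info')]),
#                     (l1design, l1estimate, [('spm_mat_file', 'spm_mat_file')]),
#                     (l1estimate, l1contrasts, [('spm_mat_file', 'spm_mat_file'),
#                                                ('beta_images', 'beta_images'),
#                                                ('residual_image', 'residual_image')])])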
# ======================================================================
# DEFINE META-WORKFLOW PIPELINE:
# ======================================================================
# initiation of the meta-workflow pipeline:
l1pipeline = Workflow(name='l1pipeline')
# stop the workflow on the first crash and hash node inputs by timestamp:
l1pipeline.config = {
    'execution': {
        'stop_on_first_crash': True,
        'hash_method': 'timestamp'
    }
}
# define the base directory of the workflow:
l1pipeline.base_dir = opj(path_root, 'work')
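# The snippet does not show the execution call; once all nodes are connected,
# a workflow like this is typically launched with Workflow.run(). The plugin
# choice and n_procs value below are assumptions, shown for illustration only:
# l1pipeline.run(plugin='MultiProc', plugin_args={'n_procs': 4})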
# ======================================================================
# ENABLE LOGGING:
# ======================================================================
# enable logging to file:
#config.enable_debug_mode()
#config.update_config({'logging': {'log_directory': os.getcwd(),
#                                  'log_to_file': True}})
#logging.update_logging(config)
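# If the logging block above is enabled, it additionally needs the config and
# logging objects from nipype, which are not imported in this snippet
# (a sketch, assuming the standard nipype API):
# from nipype import config, logging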
# ======================================================================
# CONNECT WORKFLOW NODES:
# ======================================================================
Example #2
antsreg = Node(Registration(
    number_of_iterations=[[1000, 500, 250, 100], [1000, 500, 250, 100],
                          [100, 70, 50, 20]],
    radius_or_number_of_bins=[32, 32, 4],
    sampling_percentage=[0.25, 0.25, 1],
    sampling_strategy=['Regular', 'Regular', 'None'],
    shrink_factors=[[8, 4, 2, 1]] * 3,
    transform_parameters=[(0.1, ), (0.1, ), (0.1, 3.0, 0.0)],
    use_histogram_matching=True,
    write_composite_transform=False),
               name='antsreg')

deform_workflow = Workflow(name="deform_workflow")
deform_workflow.base_dir = os.path.join(output_dir, 'deform_workingdir')
deform_workflow.config = {
    "execution": {
        "crashdump_dir": os.path.join(output_dir, 'deform_crashdumps')
    }
}
deform_workflow.connect([(getmusemri_baseline, reorient, [('musemri',
                                                           'in_file')]),
                         (reorient, antsreg, [('out_file', 'moving_image')])])

# Step 2: Concatenate transformations and bring baseline MPRAGE into MNI space

# We have to reverse the 'forward_transforms' output of the antsreg Node so that
# the affine and deformable warps are applied in the correct order
reverselist = Node(Function(input_names=['l'],
                            output_names=['revlist'],
                            function=reverse_list),
                   name='reverselist')
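# The reverse_list helper wrapped by the Function node above is not defined in
# this snippet; given its input/output names and the comment above, it is
# presumably a small, self-contained list-reversal function along these lines
# (an assumption, not the original implementation):
def reverse_list(l):
    # reverse the order of the forward transforms so that the warps are
    # applied in the order expected downstream
    revlist = list(l)
    revlist.reverse()
    return revlist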
Example #3
realign_qc = Node(interface=realign_snapshots(splitTime=0), name="realign_qc")

datasink = Node(interface=nio.DataSink(), name="datasink")
datasink.inputs.base_directory = output_dir
datasink.inputs.container = os.path.join('output', 'realign_wf')
datasink.inputs.substitutions = [('_roi', ''), ('_merged', ''),
                                 ('_reoriented', ''), ('_unpadded', ''),
                                 ('_nanto0', ''), ('_masked', '')]
datasink.inputs.regexp_substitutions = [(r'_\d+\.\d+to\d+\.\d+min', r'')]
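# Illustration of the regexp substitution above: it strips time-window suffixes
# such as '_50.0to70.0min' from DataSink output paths (the filename used here
# is hypothetical):
# >>> re.sub(r'_\d+\.\d+to\d+\.\d+min', '', 'pet_mean_50.0to70.0min.nii.gz')
# 'pet_mean.nii.gz'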

realign_workflow = Workflow(name="realign_workflow")
realign_workflow.base_dir = os.path.join(output_dir, 'realign_workingdir')
realign_workflow.config = {
    "execution": {
        "crashdump_dir": os.path.join(output_dir, 'realign_crashdumps')
    }
}
realign_workflow.connect([
    (getpet4D, reorient, [('pet4D', 'in_file')]),
    (reorient, realign, [('out_file', 'in_files')]),

    # remove low signal slices at image boundary
    (realign, unpad, [('realigned_files', 'timeSeriesImgFile')]),

    # get mask of within-field voxels after realignment
    (unpad, nan_mask_4D, [('unpaddedImgFile', 'in_file')]),
    #(realign, nan_mask_4D, [('realigned_files','in_file')]),
    (nan_mask_4D, nan_mask, [('out_file', 'in_file')]),
    # invert nan mask to get not-nan mask
    (nan_mask, mulneg1, [('out_file', 'in_file')]),
Example #4
# initiation of the 1st-level analysis workflow:
l1analysis = Workflow(name='l1analysis')
# connect the 1st-level analysis components
l1analysis.connect(l1model, 'session_info', l1design, 'session_info')
l1analysis.connect(l1design, 'spm_mat_file', l1estimate, 'spm_mat_file')
l1analysis.connect(l1estimate, 'spm_mat_file', l1contrasts, 'spm_mat_file')
l1analysis.connect(l1estimate, 'beta_images', l1contrasts, 'beta_images')
l1analysis.connect(l1estimate, 'residual_image', l1contrasts, 'residual_image')

# ======================================================================
# DEFINE META-WORKFLOW PIPELINE:
# ======================================================================
# initiation of the meta-workflow pipeline:
l1pipeline = Workflow(name='l1pipeline')
# stop execution of the workflow if an error is encountered:
l1pipeline.config = {'execution': {'stop_on_first_crash': True}}
# define the base directory of the workflow:
l1pipeline.base_dir = opj(path_root, 'work')
# connect infosource to selectfiles node:
l1pipeline.connect(infosource, 'subject_id', selectfiles, 'subject_id')
# generate subject specific events and regressors to subject_info:
l1pipeline.connect(events_info, 'events_info', subject_info, 'events')
l1pipeline.connect(selectfiles, 'confounds', subject_info, 'confounds')
# connect functional files to smoothing workflow:
# l1pipeline.connect(selectfiles, 'func', susan, 'inputnode.in_files')
# l1pipeline.connect(selectfiles, 'wholemask', susan, 'inputnode.mask_file')
# l1pipeline.connect(susan, 'outputnode.smoothed_files', l1datasink, 'smooth')
# # connect smoothed functional data to the trimming node:
# l1pipeline.connect(susan, 'outputnode.smoothed_files', trim, 'in_file')
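# The commented-out connections above address nodes inside a nested smoothing
# workflow via the dotted 'inputnode.*' / 'outputnode.*' notation. The susan
# workflow they refer to is presumably Nipype's canned SUSAN smoothing workflow;
# a sketch of how such a sub-workflow is usually created (the fwhm value is an
# assumption):
# from nipype.workflows.fmri.fsl import create_susan_smooth
# susan = create_susan_smooth()
# susan.inputs.inputnode.fwhm = 4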

# ======================================================================
Example #5
                              t_end_EA=endTime_EA,
                              t_end_kinetic_model=endTime_DVR,
                              t_start_SUVR=startTime_50to70min,
                              t_end_SUVR=endTime_50to70min,
                              psf_fwhm_x=psf_fwhm_x,
                              psf_fwhm_y=psf_fwhm_y,
                              psf_fwhm_z=psf_fwhm_z,
                              smooth_fwhm=smooth_fwhm,
                              n_procs=2),  # no_pvc=False
    name="pib_wrapper")

pib_workflow = Workflow(name="pib_workflow")
pib_workflow.base_dir = output_dir
pib_workflow.config = {
    "execution": {
        "crashdump_dir": os.path.join(output_dir, 'pib_crashdumps')
    }
}
pib_workflow.connect([  # PET time frame realignment
    (infosource, getpib, [('idvi', 'key')]),
    (infosource, getpibtiming, [('idvi', 'key')]),
    (infosource, getmusemri, [('idvi', 'key')]),
    (infosource, getmuselabel, [('idvi', 'key')]),
    (infosource, selectfiles, [('idvi', 'idvi')]),
    (infosource, outputdir_gen, [('idvi', 'dir2')]),
    (outputdir_gen, pib_wrapper, [('outputdir', 'outputdir')]),
    (getpib, pib_wrapper, [('pib', 'pet4D')]),
    (getpibtiming, pib_wrapper, [('pibtiming', 'pettiming')]),
    (getmusemri, pib_wrapper, [('musemri', 'mri')]),
    (getmuselabel, pib_wrapper, [('muselabel', 'label')]),
    (selectfiles, pib_wrapper, [('composite', 'mnitransform')]),