Example #1
def __init__(self, in_file='path', **options):
    from nipype.interfaces.fsl import ImageMaths
    fslmaths = ImageMaths()
    fslmaths.inputs.in_file = in_file
    # forward any extra keyword arguments straight onto the interface inputs
    for ef in options:
        setattr(fslmaths.inputs, ef, options[ef])
    self.res = fslmaths.run()
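A minimal usage sketch for the snippet above. The class name FslMathsWrapper is hypothetical (the snippet only shows the __init__), the file names are placeholders, and FSL must be installed for run() to succeed:

from nipype.interfaces.fsl import ImageMaths

class FslMathsWrapper:
    # hypothetical wrapper class around the __init__ shown above
    def __init__(self, in_file='path', **options):
        fslmaths = ImageMaths()
        fslmaths.inputs.in_file = in_file
        for ef in options:
            setattr(fslmaths.inputs, ef, options[ef])
        self.res = fslmaths.run()

wrapper = FslMathsWrapper(in_file='func.nii.gz', op_string='-Tmean',
                          out_file='func_mean.nii.gz')
print(wrapper.res.outputs.out_file)  # image written by fslmaths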
Example #2
def init_b1_mcf(rf_pulse=None, scale=150):
    inputnode = Node(IdentityInterface(fields=['2db1map_file', 'ref_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['b1_plus', 'b1_pulse']),
                      name='outputnode')

    b1_b1 = Node(ExtractROI(t_min=0, t_size=1), name='b1_extract_b1')
    b1_filter = Node(Filter(filter_spec='Gauss,3.0'), name='b1_filter')
    b1_mag = Node(ExtractROI(t_min=1, t_size=1), name='b1_extract_mag')

    b1_reg = Node(FLIRT(out_file='b1mag_reg.nii.gz',
                        out_matrix_file='b1mag_reg.mat'),
                  name='b1_reg')
    b1_invert = Node(ConvertXFM(invert_xfm=True), name='b1_invert')
    b1_apply = Node(FLIRT(apply_xfm=True), name='b1_reg_apply')
    b1_scale = Node(ImageMaths(op_string='-div %f' % scale), name='b1_scale')

    wf = Workflow(name='b1_prep')
    wf.connect([(inputnode, b1_b1, [('2db1map_file', 'in_file')]),
                (inputnode, b1_mag, [('2db1map_file', 'in_file')]),
                (inputnode, b1_reg, [('ref_file', 'in_file')]),
                (inputnode, b1_apply, [('ref_file', 'reference')]),
                (b1_mag, b1_reg, [('roi_file', 'reference')]),
                (b1_reg, b1_invert, [('out_matrix_file', 'in_file')]),
                (b1_invert, b1_apply, [('out_file', 'in_matrix_file')]),
                (b1_b1, b1_filter, [('roi_file', 'in_file')]),
                (b1_filter, b1_apply, [('out_file', 'in_file')]),
                (b1_apply, b1_scale, [('out_file', 'in_file')]),
                (b1_scale, outputnode, [('out_file', 'b1_plus')])])
    if rf_pulse:
        b1_rf = Node(RFProfile(rf=rf_pulse, out_file='b1_rf.nii.gz'),
                     name='b1_rf')
        wf.connect([(b1_scale, b1_rf, [('out_file', 'in_file')]),
                    (b1_rf, outputnode, [('out_file', 'b1_pulse')])])
    return wf
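A usage sketch for the factory above, with placeholder file names. Because '2db1map_file' is not a valid Python identifier, it is set via setattr rather than attribute access; Filter and RFProfile are assumed to come from QUIT's nipype interfaces, as in the surrounding module:

b1_wf = init_b1_mcf(rf_pulse=None, scale=150)
setattr(b1_wf.inputs.inputnode, '2db1map_file', 'b1map.nii.gz')  # placeholder
b1_wf.inputs.inputnode.ref_file = 'ref.nii.gz'                   # placeholder
b1_wf.run()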
Example #3
def cert(zfrqs):
    inputnode = Node(IdentityInterface(fields=['cert_180', 'cert_360', 'mask_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['cert_spectrum', 'cert_amide']),
                      name='outputnode')

    cert_sub = Node(ImageMaths(op_string='-sub', out_file='cert.nii.gz'),
                    name='cert_subtract')
    amide_index = (np.abs(zfrqs - 3.5)).argmin()
    amide = Node(Select(volumes=[amide_index], out_file='amide.nii.gz'),
                 name='select_amide')

    cert = Workflow(name='CERT')
    cert.connect([(inputnode, cert_sub, [('cert_360', 'in_file'), ('cert_180', 'in_file2')]),
                  (cert_sub, amide, [('out_file', 'in_file')]),
                  (cert_sub, outputnode, [('out_file', 'cert_spectrum')]),
                  (amide, outputnode, [('out_file', 'cert_amide')])
                  ])

    return cert
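A usage sketch with placeholder inputs. Node, Workflow, IdentityInterface, ImageMaths and np are assumed to be imported by the surrounding module, and Select here appears to be a volume-selection interface (e.g. from QUIT's nipype wrappers) rather than nipype's utility Select, which takes inlist/index instead:

import numpy as np
zfrqs = np.linspace(-5, 5, 41)  # placeholder Z-spectrum offsets in ppm
cert_wf = cert(zfrqs)
cert_wf.inputs.inputnode.cert_180 = 'cert_180.nii.gz'  # placeholder files
cert_wf.inputs.inputnode.cert_360 = 'cert_360.nii.gz'
cert_wf.run()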
Example #4
def cert(zfrqs):
    inputnode = Node(IdentityInterface(fields=['cert_180', 'cert_360', 'mask_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['cert_spectrum', 'cert_amide']),
                      name='outputnode')

    cert_sub = Node(ImageMaths(op_string='-sub', out_file='cert.nii.gz'),
                    name='cert_subtract')
    amide_index = (np.abs(zfrqs - 3.5)).argmin()
    amide = Node(ExtractROI(t_min=amide_index, t_size=1),
                 name='amide_extract')

    cert = Workflow(name='CERT')
    cert.connect([(inputnode, cert_sub, [('cert_360', 'in_file'), ('cert_180', 'in_file2')]),
                  (cert_sub, amide, [('out_file', 'in_file')]),
                  (cert_sub, outputnode, [('out_file', 'cert_spectrum')]),
                  (amide, outputnode, [('roi_file', 'cert_amide')])
                  ])

    return cert
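This variant swaps the Select node for plain FSL ExtractROI, so it only needs standard nipype interfaces (note the output field changes from out_file to roi_file). A sketch of what the extraction node does in isolation, with a placeholder index and file names:

from nipype.interfaces.fsl import ExtractROI

# pull a single volume (here index 17) out of a 4D Z-spectrum image
roi = ExtractROI(in_file='cert.nii.gz', t_min=17, t_size=1,
                 roi_file='amide.nii.gz')
res = roi.run()  # res.outputs.roi_file -> amide.nii.gz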
Example #5
def create_subject_ffx_wf(
        sub_id, bet_fracthr, spatial_fwhm, susan_brightthresh, hp_vols,
        lp_vols, remove_hemi, film_thresh, film_model_autocorr, use_derivs, tr,
        tcon_subtractive, cluster_threshold, cluster_thresh_frac, cluster_p,
        dilate_clusters_voxel, cond_ids, dsdir, work_basedir):
    # todo: new mapnode inputs: cluster_threshold, cluster_p
    """
    Make a workflow including preprocessing, first level, and second level GLM analysis for a given subject.
    This pipeline includes:
    - skull stripping
    - spatial smoothing
    - removing the irrelevant hemisphere
    - temporal band pass filter
    - 1st level GLM
    - averaging f-contrasts from 1st level GLM
    - clustering run-wise f-tests, dilating clusters, and returning binary roi mask
    """

    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces.fsl import BET, SUSAN, ImageMaths
    from nipype.interfaces.fsl.model import SmoothEstimate, Cluster
    from nipype.interfaces.fsl.maths import TemporalFilter, MathsCommand
    from nipype.interfaces.utility import Function
    from nipype.pipeline.engine import Workflow, Node, MapNode
    from nipype.workflows.fmri.fsl import create_modelfit_workflow
    from nipype.interfaces.fsl.maths import MultiImageMaths
    from nipype.interfaces.utility import IdentityInterface
    import sys
    from os.path import join as pjoin
    import os
    sys.path.insert(
        0, "/data/project/somato/raw/code/roi_glm")
    # TODO: don't hardcode this
    import custom_node_functions

    # set up sub-workflow
    sub_wf = Workflow(name='subject_%s_wf' % sub_id)
    # set up sub-working-directory
    subwf_wd = pjoin(work_basedir, 'subject_ffx_wfs',
                     'subject_%s_ffx_workdir' % sub_id)
    if not os.path.exists(subwf_wd):
        os.makedirs(subwf_wd)
    sub_wf.base_dir = subwf_wd

    # Grab bold files for all four runs of one subject.
    # in the order [d1_d5, d5_d1, blocked_design1, blocked_design2]
    grab_boldfiles = Node(Function(
        function=custom_node_functions.grab_boldfiles_subject,
        input_names=['sub_id', 'cond_ids', 'ds_dir'],
        output_names=['boldfiles']),
                          name='grab_boldfiles')
    grab_boldfiles.inputs.sub_id = sub_id
    grab_boldfiles.inputs.cond_ids = cond_ids
    grab_boldfiles.inputs.ds_dir = dsdir

    getonsets = Node(Function(
        function=custom_node_functions.grab_blocked_design_onsets_subject,
        input_names=['sub_id', 'prepped_ds_dir'],
        output_names=['blocked_design_onsets_dicts']),
                     name='getonsets')
    getonsets.inputs.sub_id = sub_id
    getonsets.inputs.prepped_ds_dir = dsdir

    # pass bold files through preprocessing pipeline
    bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),
                  iterfield=['in_file'],
                  name='bet')

    pick_mask = Node(Function(function=custom_node_functions.pick_first_mask,
                              input_names=['mask_files'],
                              output_names=['first_mask']),
                     name='pick_mask')

    # SUSAN smoothing node
    susan = MapNode(SUSAN(fwhm=spatial_fwhm,
                          brightness_threshold=susan_brightthresh),
                    iterfield=['in_file'],
                    name='susan')

    # bandpass filter node
    bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,
                                 lowpass_sigma=lp_vols / 2.3548),
                  iterfield=['in_file'],
                  name='bpf')

    # cut away hemisphere node
    if remove_hemi == 'r':
        roi_args = '-roi 96 -1 0 -1 0 -1 0 -1'
    elif remove_hemi == 'l':
        roi_args = '-roi 0 96 0 -1 0 -1 0 -1'
    else:
        raise IOError('did not recognize value of remove_hemi %s' %
                      remove_hemi)

    cut_hemi_func = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_func')
    cut_hemi_func.inputs.args = roi_args

    cut_hemi_mask = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_mask')
    cut_hemi_mask.inputs.args = roi_args

    # Make Design and Contrasts for that subject
    # subject_info is a list of two "Bunches", one per run, containing conditions, onsets, and durations
    designgen = Node(Function(
        input_names=['subtractive_contrast', 'blocked_design_onsets_dicts'],
        output_names=['subject_info', 'contrasts'],
        function=custom_node_functions.make_bunch_and_contrasts),
                     name='designgen')
    designgen.inputs.subtractive_contrast = tcon_subtractive

    # create 'session_info' for modelfit
    modelspec = MapNode(SpecifyModel(input_units='secs'),
                        name='modelspec',
                        iterfield=['functional_runs', 'subject_info'])
    modelspec.inputs.high_pass_filter_cutoff = hp_vols * tr
    modelspec.inputs.time_repetition = tr

    flatten_session_infos = Node(Function(
        input_names=['nested_list'],
        output_names=['flat_list'],
        function=custom_node_functions.flatten_nested_list),
                                 name='flatten_session_infos')

    # First-level workflow
    modelfit = create_modelfit_workflow(f_contrasts=True)
    modelfit.inputs.inputspec.interscan_interval = tr
    modelfit.inputs.inputspec.film_threshold = film_thresh
    modelfit.inputs.inputspec.model_serial_correlations = film_model_autocorr
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivs}}

    # node that reshapes list of copes returned from modelfit
    cope_sorter = Node(Function(input_names=['copes', 'varcopes', 'contrasts'],
                                output_names=['copes', 'varcopes', 'n_runs'],
                                function=custom_node_functions.sort_copes),
                       name='cope_sorter')

    # average zfstats from both runs
    split_zfstats = Node(Function(
        function=custom_node_functions.split_zfstats_runs,
        input_names=['zfstats_list'],
        output_names=['zfstat_run1', 'zfstat_run2']),
                         name='split_zfstats')
    average_zfstats = Node(MultiImageMaths(op_string='-add %s -div 2'),
                           name='mean_images')

    # estimate smoothness of 1st lvl zf-files
    smoothest = MapNode(SmoothEstimate(),
                        name='smoothest',
                        iterfield=['mask_file', 'zstat_file'])

    cluster = MapNode(Cluster(),
                      name='cluster',
                      iterfield=['in_file', 'volume', 'dlh'])
    cluster.inputs.threshold = cluster_threshold
    cluster.inputs.pthreshold = cluster_p
    cluster.inputs.fractional = cluster_thresh_frac
    cluster.inputs.no_table = True
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_size_file = True

    # dilate clusters
    dilate = MapNode(MathsCommand(args='-kernel sphere %i -dilD' %
                                  dilate_clusters_voxel),
                     iterfield=['in_file'],
                     name='dilate')

    # binarize the result to a mask
    binarize_roi = MapNode(ImageMaths(op_string='-nan -thr 0.001 -bin'),
                           iterfield=['in_file'],
                           name='binarize_roi')

    # connect preprocessing
    sub_wf.connect(grab_boldfiles, 'boldfiles', bet, 'in_file')
    sub_wf.connect(bet, 'out_file', susan, 'in_file')
    sub_wf.connect(susan, 'smoothed_file', bpf, 'in_file')
    sub_wf.connect(bpf, 'out_file', cut_hemi_func, 'in_file')
    sub_wf.connect(bet, 'mask_file', cut_hemi_mask, 'in_file')
    # connect to 1st level model
    sub_wf.connect(cut_hemi_func, 'out_file', modelspec, 'functional_runs')
    sub_wf.connect(getonsets, 'blocked_design_onsets_dicts', designgen,
                   'blocked_design_onsets_dicts')
    sub_wf.connect(designgen, 'subject_info', modelspec, 'subject_info')
    sub_wf.connect(modelspec, 'session_info', flatten_session_infos,
                   'nested_list')
    sub_wf.connect(flatten_session_infos, 'flat_list', modelfit,
                   'inputspec.session_info')
    sub_wf.connect(designgen, 'contrasts', modelfit, 'inputspec.contrasts')
    sub_wf.connect(cut_hemi_func, 'out_file', modelfit,
                   'inputspec.functional_data')
    # connect to cluster thresholding
    sub_wf.connect(cut_hemi_mask, 'out_file', smoothest, 'mask_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', smoothest,
                   'zstat_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', cluster,
                   'in_file')
    sub_wf.connect(smoothest, 'dlh', cluster, 'dlh')
    sub_wf.connect(smoothest, 'volume', cluster, 'volume')
    sub_wf.connect(cluster, 'threshold_file', dilate, 'in_file')
    sub_wf.connect(dilate, 'out_file', binarize_roi, 'in_file')
    # connect to averaging f-contrasts
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   split_zfstats, 'zfstats_list')
    sub_wf.connect(split_zfstats, 'zfstat_run1', average_zfstats, 'in_file')
    sub_wf.connect(split_zfstats, 'zfstat_run2', average_zfstats,
                   'operand_files')
    # redirect to outputspec
    # TODO: redirect outputspec to datasink in meta-wf
    outputspec = Node(IdentityInterface(fields=[
        'threshold_file', 'index_file', 'pval_file', 'localmax_txt_file',
        'roi'
    ]),
                      name='outputspec')
    sub_wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    sub_wf.connect(cluster, 'index_file', outputspec, 'index_file')
    sub_wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    sub_wf.connect(cluster, 'localmax_txt_file', outputspec,
                   'localmax_txt_file')
    sub_wf.connect(binarize_roi, 'out_file', outputspec, 'roi')

    # run subject-lvl workflow
    # sub_wf.write_graph(graph2use='colored', dotfilename='./subwf_graph.dot')
    # sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})
    # sub_wf.run(plugin='CondorDAGMan')
    # sub_wf.run()

    return sub_wf
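A hedged invocation sketch; every argument value below is a placeholder, since the snippet does not show the values used in the actual study (only the condition names from the comment above are reused):

ffx_wf = create_subject_ffx_wf(
    sub_id='01', bet_fracthr=0.2, spatial_fwhm=3.,
    susan_brightthresh=1000., hp_vols=30., lp_vols=2.,
    remove_hemi='r', film_thresh=0.001, film_model_autocorr=True,
    use_derivs=True, tr=2.0, tcon_subtractive=False,
    cluster_threshold=2.3, cluster_thresh_frac=True, cluster_p=0.05,
    dilate_clusters_voxel=2,
    cond_ids=['d1_d5', 'd5_d1', 'blocked_design1', 'blocked_design2'],
    dsdir='/data/project/somato/dataset',
    work_basedir='/data/project/somato/work')
ffx_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})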
Example #6
def structural_to_functional_per_participant_test(subjects_sessions,
	template = "~/GitHub/mriPipeline/templates/waxholm/new/WHS_SD_masked.nii.gz",
	f_file_format = "~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_SE_EPI/f_bru2nii/",
	s_file_format = "~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_T2_TurboRARE/s_bru2nii/",
	num_threads = 3,
	):

	template = os.path.expanduser(template)
	for subject_session in subjects_sessions:
		func_image_dir = os.path.expanduser(f_file_format.format(**subject_session))
		struct_image_dir = os.path.expanduser(s_file_format.format(**subject_session))
		try:
			for myfile in os.listdir(func_image_dir):
				if myfile.endswith((".nii.gz", ".nii")):
					func_image = os.path.join(func_image_dir,myfile)
			for myfile in os.listdir(struct_image_dir):
				if myfile.endswith((".nii.gz", ".nii")):
					struct_image = os.path.join(struct_image_dir,myfile)
		except FileNotFoundError:
			pass
		else:
			n4 = ants.N4BiasFieldCorrection()
			n4.inputs.dimension = 3
			n4.inputs.input_image = struct_image
			# a correction bias is introduced (along the z-axis) if the following value is set below 85; this is likely contingent on resolution.
			n4.inputs.bspline_fitting_distance = 100
			n4.inputs.shrink_factor = 2
			n4.inputs.n_iterations = [200,200,200,200]
			n4.inputs.convergence_threshold = 1e-11
			n4.inputs.output_image = '{}_{}_1_biasCorrection_forRegistration.nii.gz'.format(*subject_session.values())
			n4_res = n4.run()

			_n4 = ants.N4BiasFieldCorrection()
			_n4.inputs.dimension = 3
			_n4.inputs.input_image = struct_image
			# a correction bias is introduced (along the z-axis) if the following value is set below 85; this is likely contingent on resolution.
			_n4.inputs.bspline_fitting_distance = 95
			_n4.inputs.shrink_factor = 2
			_n4.inputs.n_iterations = [500,500,500,500]
			_n4.inputs.convergence_threshold = 1e-14
			_n4.inputs.output_image = '{}_{}_1_biasCorrection_forMasking.nii.gz'.format(*subject_session.values())
			_n4_res = _n4.run()

			# we do this on a separate bias-corrected image (which we have to create) to remove hyperintensities, so that brain regions are not caught by the negative threshold
			struct_cutoff = ImageMaths()
			struct_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"
			struct_cutoff.inputs.in_file = _n4_res.outputs.output_image
			struct_cutoff_res = struct_cutoff.run()

			struct_BET = BET()
			struct_BET.inputs.mask = True
			struct_BET.inputs.frac = 0.3
			struct_BET.inputs.robust = True
			struct_BET.inputs.in_file = struct_cutoff_res.outputs.out_file
			struct_BET.inputs.out_file = '{}_{}_2_brainExtraction.nii.gz'.format(*subject_session.values())
			struct_BET_res = struct_BET.run()

			# we neither need nor can apply a fill: the "holes", if any, will be at the rostral edge (touching it, and thus not counting as holes)
			struct_mask = ApplyMask()
			struct_mask.inputs.in_file = n4_res.outputs.output_image
			struct_mask.inputs.mask_file = struct_BET_res.outputs.mask_file
			struct_mask.inputs.out_file = '{}_{}_3_brainMasked.nii.gz'.format(*subject_session.values())
			struct_mask_res = struct_mask.run()

			struct_registration = ants.Registration()
			struct_registration.inputs.fixed_image = template
			struct_registration.inputs.output_transform_prefix = "output_"
			struct_registration.inputs.transforms = ['Affine', 'SyN'] ##
			struct_registration.inputs.transform_parameters = [(1.0,), (1.0, 3.0, 5.0)] ##
			struct_registration.inputs.number_of_iterations = [[2000, 1000, 500], [100, 100, 100]] #
			struct_registration.inputs.dimension = 3
			struct_registration.inputs.write_composite_transform = True
			struct_registration.inputs.collapse_output_transforms = True
			struct_registration.inputs.initial_moving_transform_com = True
			# Tested on Affine transform: CC takes too long; Demons does not tilt, but moves the slices too far caudally; GC tilts too much on
			struct_registration.inputs.metric = ['MeanSquares', 'Mattes']
			struct_registration.inputs.metric_weight = [1, 1]
			struct_registration.inputs.radius_or_number_of_bins = [16, 32] #
			struct_registration.inputs.sampling_strategy = ['Random', None]
			struct_registration.inputs.sampling_percentage = [0.3, 0.3]
			struct_registration.inputs.convergence_threshold = [1.e-11, 1.e-8] #
			struct_registration.inputs.convergence_window_size = [20, 20]
			struct_registration.inputs.smoothing_sigmas = [[4, 2, 1], [4, 2, 1]]
			struct_registration.inputs.sigma_units = ['vox', 'vox']
			struct_registration.inputs.shrink_factors = [[3, 2, 1],[3, 2, 1]]
			struct_registration.inputs.use_estimate_learning_rate_once = [True, True]
			# if the fixed_image is not acquired similarly to the moving_image (e.g. RARE to histological (e.g. AMBMC)) this should be False
			struct_registration.inputs.use_histogram_matching = [False, False]
			struct_registration.inputs.winsorize_lower_quantile = 0.005
			struct_registration.inputs.winsorize_upper_quantile = 0.98
			struct_registration.inputs.args = '--float'
			struct_registration.inputs.num_threads = num_threads

			struct_registration.inputs.moving_image = struct_mask_res.outputs.out_file
			struct_registration.inputs.output_warped_image = '{}_{}_4_structuralRegistration.nii.gz'.format(*subject_session.values())
			struct_registration_res = struct_registration.run()

			warp = ants.ApplyTransforms()
			warp.inputs.reference_image = template
			warp.inputs.input_image_type = 3
			warp.inputs.interpolation = 'Linear'
			warp.inputs.invert_transform_flags = [False]
			warp.inputs.terminal_output = 'file'
			warp.inputs.output_image = '{}_{}_5_functionalWarp.nii.gz'.format(*subject_session.values())
			warp.num_threads = num_threads

			warp.inputs.input_image = func_image
			warp.inputs.transforms = struct_registration_res.outputs.composite_transform
			warp.run()
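The function formats each subject_session dict into the path templates and the output file names via '{}_{}'.format(*subject_session.values()), so every dict needs a 'subject' key inserted before a 'session' key (dict insertion order is relied on). A minimal call with placeholder values:

structural_to_functional_per_participant_test(
    subjects_sessions=[{'subject': '5502', 'session': 'ofM'}],  # placeholders
    num_threads=3)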
Example #7
def main(paths, options_binary_string, ANAT, num_proc=7):

    json_path = paths[0]
    base_directory = paths[1]
    motion_correction_bet_directory = paths[2]
    parent_wf_directory = paths[3]
    # functional_connectivity_directory=paths[4]
    coreg_reg_directory = paths[5]
    atlas_resize_reg_directory = paths[6]
    subject_list = paths[7]
    datasink_name = paths[8]
    # fc_datasink_name=paths[9]
    atlasPath = paths[10]
    # brain_path=paths[11]
    # mask_path=paths[12]
    # atlas_path=paths[13]
    # tr_path=paths[14]
    # motion_params_path=paths[15]
    # func2std_mat_path=paths[16]
    # MNI3mm_path=paths[17]
    # demographics_file_path = paths[18]
    # phenotype_file_path = paths[19]
    data_directory = paths[20]

    number_of_subjects = len(subject_list)
    print("Working with ", number_of_subjects, " subjects.")

    # Create our own custom function - BIDSDataGrabber using a Function Interface.

    # In[858]:

    def get_nifti_filenames(subject_id, data_dir):
        #     Remember that all the necessary imports need to be INSIDE the function for the Function Interface to work!
        from bids.grabbids import BIDSLayout

        layout = BIDSLayout(data_dir)
        run = 1

        anat_file_path = [
            f.filename for f in layout.get(
                subject=subject_id, type='T1w', extensions=['nii', 'nii.gz'])
        ]
        func_file_path = [
            f.filename for f in layout.get(subject=subject_id,
                                           type='bold',
                                           run=run,
                                           extensions=['nii', 'nii.gz'])
        ]

        if len(anat_file_path) == 0:
            return None, func_file_path[0]  # No Anatomical files present
        return anat_file_path[0], func_file_path[0]

    BIDSDataGrabber = Node(Function(
        function=get_nifti_filenames,
        input_names=['subject_id', 'data_dir'],
        output_names=['anat_file_path', 'func_file_path']),
                           name='BIDSDataGrabber')
    # BIDSDataGrabber.iterables = [('subject_id',subject_list)]
    BIDSDataGrabber.inputs.data_dir = data_directory

    # ## Return TR

    def get_TR(in_file):
        from bids.grabbids import BIDSLayout

        data_directory = '/home1/varunk/data/ABIDE1/RawDataBIDs'
        layout = BIDSLayout(data_directory)
        metadata = layout.get_metadata(path=in_file)
        TR = metadata['RepetitionTime']
        return TR

    # ---------------- Added new Node to return TR and other slice timing correction params-------------------------------
    def _getMetadata(in_file):
        from bids.grabbids import BIDSLayout
        import logging

        logger = logging.getLogger(__name__)
        logger.setLevel(logging.DEBUG)

        # create a file handler
        handler = logging.FileHandler('progress.log')

        # add the handlers to the logger
        logger.addHandler(handler)

        interleaved = True
        index_dir = False
        data_directory = '/home1/varunk/data/ABIDE1/RawDataBIDs'
        layout = BIDSLayout(data_directory)
        metadata = layout.get_metadata(path=in_file)
        print(metadata)

        logger.info('Extracting Meta Data of file: %s', in_file)
        try:
            tr = metadata['RepetitionTime']
        except KeyError:
            print(
                'Key RepetitionTime not found in task-rest_bold.json so using a default of 2.0 '
            )
            tr = 2
            logger.error(
                'Key RepetitionTime not found in task-rest_bold.json for file %s so using a default of 2.0 ',
                in_file)

        try:
            slice_order = metadata['SliceAcquisitionOrder']
        except KeyError:
            print(
                'Key SliceAcquisitionOrder not found in task-rest_bold.json so using a default of interleaved ascending '
            )
            logger.error(
                'Key SliceAcquisitionOrder not found in task-rest_bold.json for file %s so using a default of interleaved ascending',
                in_file)
            return tr, index_dir, interleaved

        if slice_order.split(' ')[0] == 'Sequential':
            interleaved = False
        if slice_order.split(' ')[1] == 'Descending':
            index_dir = True

        return tr, index_dir, interleaved

    getMetadata = Node(Function(
        function=_getMetadata,
        input_names=['in_file'],
        output_names=['tr', 'index_dir', 'interleaved']),
                       name='getMetadata')

    # ### Skipping 4 starting scans
    # Extract ROI for skipping first 4 scans of the functional data
    # > **Arguments:**
    # t_min: (corresponds to time dimension) Denotes the starting time of the inclusion
    # t_size: Denotes the number of scans to include
    #
    # The logic behind skipping the 4 initial scans is to take scans acquired after the subject has stabilized in the scanner.

    # In[863]:

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
                   name="extract")

    # ### Slice time correction
    # Created a Node that does slice time correction
    # > **Arguments**:
    # index_dir=False -> slices were taken bottom to top, i.e. in ascending order
    # interleaved=True means odd slices were acquired first and then even slices [or vice versa (not sure)]

    slicetimer = Node(SliceTimer(output_type='NIFTI'), name="slicetimer")

    # ### Motion Correction
    # Motion correction is done using FSL's MCFLIRT. It aligns all the volumes of a functional scan to each other

    # MCFLIRT - motion correction
    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="mcflirt")

    #  Just a dummy node to transfer the output of MCFLIRT to the next workflow. Needed if we didn't want to use MCFLIRT
    from_mcflirt = Node(IdentityInterface(fields=['in_file']),
                        name="from_mcflirt")

    # ### Skull stripping
    # I used fsl's BET

    # In[868]:

    skullStrip = Node(BET(mask=False, frac=0.3, robust=True),
                      name='skullStrip')  #

    # *Note*: Do not include special characters in the ```name``` field above, because wf.write_graph will otherwise cause issues

    # ## Resample
    # I needed to resample the anatomical file from 1mm to 3mm, because registering a 1mm file was taking a huge amount of time.
    #

    # In[872]:

    # Resample - resample anatomy to 3x3x3 voxel resolution
    resample_mni = Node(
        Resample(
            voxel_size=(3, 3, 3),
            resample_mode='Cu',  # cubic interpolation
            outputtype='NIFTI'),
        name="resample_mni")

    resample_anat = Node(
        Resample(
            voxel_size=(3, 3, 3),
            resample_mode='Cu',  # cubic interpolation
            outputtype='NIFTI'),
        name="resample_anat")

    # In[873]:

    resample_atlas = Node(
        Resample(
            voxel_size=(3, 3, 3),
            resample_mode='NN',  # nearest-neighbour interpolation
            outputtype='NIFTI'),
        name="resample_atlas")

    resample_atlas.inputs.in_file = atlasPath

    # # Matrix operations
    # ### For concatenating the transformation matrices

    concat_xform = Node(ConvertXFM(concat_xfm=True), name='concat_xform')

    # Node to calculate the inverse of func2std matrix
    inv_mat = Node(ConvertXFM(invert_xfm=True), name='inv_mat')

    # ## Extracting the mean brain

    meanfunc = Node(interface=ImageMaths(op_string='-Tmean', suffix='_mean'),
                    name='meanfunc')

    meanfuncmask = Node(interface=BET(mask=True, no_output=True, frac=0.3),
                        name='meanfuncmask')

    # ## Apply Mask

    # Does BET (masking) on the whole func scan [not using this; it creates a bug for the join node]
    maskfunc = Node(interface=ImageMaths(suffix='_bet', op_string='-mas'),
                    name='maskfunc')

    # Does BET (masking) on the mean func scan
    maskfunc4mean = Node(interface=ImageMaths(suffix='_bet', op_string='-mas'),
                         name='maskfunc4mean')

    # ## Datasink
    # I needed to define the structure of what files are saved and where.

    # Create DataSink object
    dataSink = Node(DataSink(), name='datasink')

    # Name of the output folder
    dataSink.inputs.base_directory = opj(base_directory, datasink_name)

    # Define substitution strings so that the data is similar to BIDS
    substitutions = [
        ('_subject_id_', 'sub-'), ('_resample_brain_flirt.nii_brain', ''),
        ('_roi_st_mcf_flirt.nii_brain_flirt', ''),
        ('task-rest_run-1_bold_roi_st_mcf.nii', 'motion_params'),
        ('T1w_resample_brain_flirt_sub-0050002_task-rest_run-1_bold_roi_st_mcf_mean_bet_flirt',
         'fun2std')
    ]

    # Feed the substitution strings to the DataSink node
    dataSink.inputs.substitutions = substitutions

    # ### Apply Mask to functional data
    # The mean file of the motion corrected functional scan is sent to
    # skullStrip to get just the brain and the mask_image.
    # mask_image is just a binary file (containing 1 where brain is present and 0 where it isn't).
    # After getting the mask_image from skullStrip, apply that mask to the aligned
    # functional image to extract its brain and remove the skull

    # In[889]:

    # Function
    # in_file: The file on which you want to apply the mask
    # in_file2 = mask_file: The mask you want to use. Make sure that mask_file has the same size as in_file
    # out_file: Result of applying the mask to in_file -> gives the path of the output file

    def applyMask_func(in_file, in_file2):
        import numpy as np
        import nibabel as nib
        import os
        from os.path import join as opj

        # convert from unicode to string : u'/tmp/tmp8daO2Q/..' -> '/tmp/tmp8daO2Q/..' i.e. removes the prefix 'u'
        mask_file = in_file2

        brain_data = nib.load(in_file)
        mask_data = nib.load(mask_file)

        brain = brain_data.get_data().astype('float32')
        mask = mask_data.get_data()

        # applying mask by multiplying elementwise to the binary mask

        if len(brain.shape) == 3:  # Anat file
            brain = np.multiply(brain, mask)
        elif len(brain.shape) > 3:  # Functional File
            for t in range(brain.shape[-1]):
                brain[:, :, :, t] = np.multiply(brain[:, :, :, t], mask)
        else:
            pass

        # Saving the brain file

        path = os.getcwd()

        in_file_split_list = in_file.split('/')
        in_file_name = in_file_split_list[-1]

        out_file = in_file_name + '_brain.nii.gz'  # changing name
        brain_with_header = nib.Nifti1Image(brain,
                                            affine=brain_data.affine,
                                            header=brain_data.header)
        nib.save(brain_with_header, out_file)

        out_file = opj(path, out_file)
        out_file2 = in_file2

        return out_file, out_file2
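    # e.g. applyMask_func('/tmp/sub-01_bold.nii', '/tmp/mask.nii') writes
    # 'sub-01_bold.nii_brain.nii.gz' into the node's working directory and
    # returns its absolute path together with the mask path (the file names
    # here are hypothetical).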

    # #### Things learnt:
    # 1. I found out that whenever a node is executed, its folder becomes the current directory, and whatever file you create now will be stored there.
    # 2. #from IPython.core.debugger import Tracer; Tracer()()    # Debugger doesn't work in nipype

    # Wrap the above function inside a Node

    # In[890]:

    applyMask = Node(Function(function=applyMask_func,
                              input_names=['in_file', 'in_file2'],
                              output_names=['out_file', 'out_file2']),
                     name='applyMask')

    # ### Some nodes needed for Co-registration and Normalization

    # Node for getting the xformation matrix
    func2anat_reg = Node(FLIRT(output_type='NIFTI'), name="func2anat_reg")

    # Node for applying xformation matrix to functional data
    func2std_xform = Node(FLIRT(output_type='NIFTI', apply_xfm=True),
                          name="func2std_xform")

    # Node for applying xformation matrix to functional data
    std2func_xform = Node(FLIRT(output_type='NIFTI',
                                apply_xfm=True,
                                interp='nearestneighbour'),
                          name="std2func_xform")

    # Node for Normalizing/Standardizing the anatomical and getting the xformation matrix
    anat2std_reg = Node(FLIRT(output_type='NIFTI'), name="anat2std_reg")

    # I wanted to use the MNI file as input to the workflow, so I created an Identity
    # Node that reads the MNI file path and outputs the same MNI file path.
    # Then I connected this node to wherever it was needed.

    MNI152_2mm = Node(IdentityInterface(fields=['standard_file', 'mask_file']),
                      name="MNI152_2mm")
    # Set the mask_file and standard_file input in the Node. This setting sets the input mask_file permanently.
    MNI152_2mm.inputs.mask_file = os.path.expandvars(
        '$FSLDIR/data/standard/MNI152_T1_2mm_brain_mask.nii.gz')

    MNI152_2mm.inputs.standard_file = os.path.expandvars(
        '$FSLDIR/data/standard/MNI152_T1_2mm_brain.nii.gz')
    # MNI152_2mm.inputs.mask_file = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain_mask.nii.gz'
    # MNI152_2mm.inputs.standard_file = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz'

    # ## Band Pass Filtering
    # Let's do a band pass filtering on the data using the code from https://neurostars.org/t/bandpass-filtering-different-outputs-from-fsl-and-nipype-custom-function/824/2

    ### AFNI

    bandpass = Node(afni.Bandpass(highpass=0.008,
                                  lowpass=0.08,
                                  despike=False,
                                  no_detrend=True,
                                  notrans=True,
                                  outputtype='NIFTI_GZ'),
                    name='bandpass')

    # ### Following are JoinNodes that collect the preprocessed file paths and save them to .npy files

    # In[902]:

    def save_file_list_function_in_brain(in_brain):
        import numpy as np
        import os
        from os.path import join as opj

        file_list = np.asarray(in_brain)
        print('######################## File List ######################: \n',
              file_list)

        np.save('brain_file_list', file_list)
        file_name = 'brain_file_list.npy'
        out_brain = opj(os.getcwd(), file_name)  # path
        return out_brain

    def save_file_list_function_in_mask(in_mask):
        import numpy as np
        import os
        from os.path import join as opj

        file_list2 = np.asarray(in_mask)
        print('######################## File List ######################: \n',
              file_list2)

        np.save('mask_file_list', file_list2)
        file_name2 = 'mask_file_list.npy'
        out_mask = opj(os.getcwd(), file_name2)  # path
        return out_mask

    def save_file_list_function_in_motion_params(in_motion_params):
        import numpy as np
        import os
        from os.path import join as opj

        file_list3 = np.asarray(in_motion_params)
        print('######################## File List ######################: \n',
              file_list3)

        np.save('motion_params_file_list', file_list3)
        file_name3 = 'motion_params_file_list.npy'
        out_motion_params = opj(os.getcwd(), file_name3)  # path
        return out_motion_params

    def save_file_list_function_in_motion_outliers(in_motion_outliers):
        import numpy as np
        import os
        from os.path import join as opj

        file_list4 = np.asarray(in_motion_outliers)
        print('######################## File List ######################: \n',
              file_list4)

        np.save('motion_outliers_file_list', file_list4)
        file_name4 = 'motion_outliers_file_list.npy'
        out_motion_outliers = opj(os.getcwd(), file_name4)  # path
        return out_motion_outliers

    def save_file_list_function_in_joint_xformation_matrix(
            in_joint_xformation_matrix):
        import numpy as np
        import os
        from os.path import join as opj

        file_list5 = np.asarray(in_joint_xformation_matrix)
        print('######################## File List ######################: \n',
              file_list5)

        np.save('joint_xformation_matrix_file_list', file_list5)
        file_name5 = 'joint_xformation_matrix_file_list.npy'
        out_joint_xformation_matrix = opj(os.getcwd(), file_name5)  # path
        return out_joint_xformation_matrix

    def save_file_list_function_in_tr(in_tr):
        import numpy as np
        import os
        from os.path import join as opj

        tr_list = np.asarray(in_tr)
        print('######################## TR List ######################: \n',
              tr_list)

        np.save('tr_list', tr_list)
        file_name6 = 'tr_list.npy'
        out_tr = opj(os.getcwd(), file_name6)  # path
        return out_tr

    def save_file_list_function_in_atlas(in_atlas):
        import numpy as np
        import os
        from os.path import join as opj

        file_list7 = np.asarray(in_atlas)
        print('######################## File List ######################: \n',
              file_list7)

        np.save('atlas_file_list', file_list7)
        file_name7 = 'atlas_file_list.npy'
        out_atlas = opj(os.getcwd(), file_name7)  # path
        return out_atlas

    save_file_list_in_brain = JoinNode(Function(
        function=save_file_list_function_in_brain,
        input_names=['in_brain'],
        output_names=['out_brain']),
                                       joinsource="infosource",
                                       joinfield=['in_brain'],
                                       name="save_file_list_in_brain")

    save_file_list_in_mask = JoinNode(Function(
        function=save_file_list_function_in_mask,
        input_names=['in_mask'],
        output_names=['out_mask']),
                                      joinsource="infosource",
                                      joinfield=['in_mask'],
                                      name="save_file_list_in_mask")

    save_file_list_in_motion_outliers = JoinNode(
        Function(function=save_file_list_function_in_motion_outliers,
                 input_names=['in_motion_outliers'],
                 output_names=['out_motion_outliers']),
        joinsource="infosource",
        joinfield=['in_motion_outliers'],
        name="save_file_list_in_motion_outliers")

    save_file_list_in_motion_params = JoinNode(
        Function(function=save_file_list_function_in_motion_params,
                 input_names=['in_motion_params'],
                 output_names=['out_motion_params']),
        joinsource="infosource",
        joinfield=['in_motion_params'],
        name="save_file_list_in_motion_params")

    save_file_list_in_joint_xformation_matrix = JoinNode(
        Function(function=save_file_list_function_in_joint_xformation_matrix,
                 input_names=['in_joint_xformation_matrix'],
                 output_names=['out_joint_xformation_matrix']),
        joinsource="infosource",
        joinfield=['in_joint_xformation_matrix'],
        name="save_file_list_in_joint_xformation_matrix")

    save_file_list_in_tr = JoinNode(Function(
        function=save_file_list_function_in_tr,
        input_names=['in_tr'],
        output_names=['out_tr']),
                                    joinsource="infosource",
                                    joinfield=['in_tr'],
                                    name="save_file_list_in_tr")

    save_file_list_in_atlas = JoinNode(Function(
        function=save_file_list_function_in_atlas,
        input_names=['in_atlas'],
        output_names=['out_atlas']),
                                       joinsource="infosource",
                                       joinfield=['in_atlas'],
                                       name="save_file_list_in_atlas")


    # def save_file_list_function(in_brain, in_mask, in_motion_params, in_motion_outliers, in_joint_xformation_matrix, in_tr, in_atlas):
    #     # Imports
    #     import numpy as np
    #     import os
    #     from os.path import join as opj
    #
    #
    #     file_list = np.asarray(in_brain)
    #     print('######################## File List ######################: \n',file_list)
    #
    #     np.save('brain_file_list',file_list)
    #     file_name = 'brain_file_list.npy'
    #     out_brain = opj(os.getcwd(),file_name) # path
    #
    #
    #     file_list2 = np.asarray(in_mask)
    #     print('######################## File List ######################: \n',file_list2)
    #
    #     np.save('mask_file_list',file_list2)
    #     file_name2 = 'mask_file_list.npy'
    #     out_mask = opj(os.getcwd(),file_name2) # path
    #
    #
    #     file_list3 = np.asarray(in_motion_params)
    #     print('######################## File List ######################: \n',file_list3)
    #
    #     np.save('motion_params_file_list',file_list3)
    #     file_name3 = 'motion_params_file_list.npy'
    #     out_motion_params = opj(os.getcwd(),file_name3) # path
    #
    #
    #     file_list4 = np.asarray(in_motion_outliers)
    #     print('######################## File List ######################: \n',file_list4)
    #
    #     np.save('motion_outliers_file_list',file_list4)
    #     file_name4 = 'motion_outliers_file_list.npy'
    #     out_motion_outliers = opj(os.getcwd(),file_name4) # path
    #
    #
    #     file_list5 = np.asarray(in_joint_xformation_matrix)
    #     print('######################## File List ######################: \n',file_list5)
    #
    #     np.save('joint_xformation_matrix_file_list',file_list5)
    #     file_name5 = 'joint_xformation_matrix_file_list.npy'
    #     out_joint_xformation_matrix = opj(os.getcwd(),file_name5) # path
    #
    #     tr_list = np.asarray(in_tr)
    #     print('######################## TR List ######################: \n',tr_list)
    #
    #     np.save('tr_list',tr_list)
    #     file_name6 = 'tr_list.npy'
    #     out_tr = opj(os.getcwd(),file_name6) # path
    #
    #
    #     file_list7 = np.asarray(in_atlas)
    #     print('######################## File List ######################: \n',file_list7)
    #
    #     np.save('atlas_file_list',file_list7)
    #     file_name7 = 'atlas_file_list.npy'
    #     out_atlas = opj(os.getcwd(),file_name7) # path
    #
    #
    #
    #
    #     return out_brain, out_mask, out_motion_params, out_motion_outliers, out_joint_xformation_matrix, out_tr , out_atlas
    #
    #
    #
    # save_file_list = JoinNode(Function(function=save_file_list_function, input_names=['in_brain', 'in_mask', 'in_motion_params','in_motion_outliers','in_joint_xformation_matrix', 'in_tr', 'in_atlas'],
    #                  output_names=['out_brain','out_mask','out_motion_params','out_motion_outliers','out_joint_xformation_matrix','out_tr', 'out_atlas']),
    #                  joinsource="infosource",
    #                  joinfield=['in_brain', 'in_mask', 'in_motion_params','in_motion_outliers','in_joint_xformation_matrix','in_tr', 'in_atlas'],
    #                  name="save_file_list")

    # ### Motion outliers

    motionOutliers = Node(MotionOutliers(no_motion_correction=False,
                                         metric='fd',
                                         out_metric_plot='fd_plot.png',
                                         out_metric_values='fd_raw.txt'),
                          name='motionOutliers')

    # ## Workflow for atlas registration from std to functional

    wf_atlas_resize_reg = Workflow(name=atlas_resize_reg_directory)

    wf_atlas_resize_reg.connect([

        # Apply the inverse matrix to the 3mm Atlas to transform it to func space
        (maskfunc4mean, std2func_xform, [('out_file', 'reference')]),
        (resample_atlas, std2func_xform, [('out_file', 'in_file')]),

        # Now, applying the inverse matrix
        (inv_mat, std2func_xform, [('out_file', 'in_matrix_file')]
         ),  # output: Atlas in func space
        (std2func_xform, save_file_list_in_atlas, [('out_file', 'in_atlas')]),

        # ---------------------------Save the required files --------------------------------------------
        (save_file_list_in_motion_params, dataSink,
         [('out_motion_params', 'motion_params_paths.@out_motion_params')]),
        (save_file_list_in_motion_outliers, dataSink,
         [('out_motion_outliers', 'motion_outliers_paths.@out_motion_outliers')
          ]),
        (save_file_list_in_brain, dataSink,
         [('out_brain', 'preprocessed_brain_paths.@out_brain')]),
        (save_file_list_in_mask, dataSink,
         [('out_mask', 'preprocessed_mask_paths.@out_mask')]),
        (save_file_list_in_joint_xformation_matrix, dataSink,
         [('out_joint_xformation_matrix',
           'joint_xformation_matrix_paths.@out_joint_xformation_matrix')]),
        (save_file_list_in_tr, dataSink, [('out_tr', 'tr_paths.@out_tr')]),
        (save_file_list_in_atlas, dataSink, [('out_atlas',
                                              'atlas_paths.@out_atlas')])
    ])

    # In[909]:

    wf_coreg_reg = Workflow(name=coreg_reg_directory)
    # wf_coreg_reg.base_dir = base_directory
    # Dir where all the outputs will be stored (inside the coregistrationPipeline folder).

    if ANAT == 1:
        wf_coreg_reg.connect(BIDSDataGrabber, 'anat_file_path', skullStrip,
                             'in_file')  # Resampled the anat file to 3mm

        wf_coreg_reg.connect(skullStrip, 'out_file', resample_anat, 'in_file')

        wf_coreg_reg.connect(
            resample_anat, 'out_file', func2anat_reg, 'reference'
        )  # Make the resampled file as reference in func2anat_reg

        # Sec 1. The above 3 steps registers the mean image to resampled anat image and
        # calculates the xformation matrix .. I hope the xformation matrix will be saved

        wf_coreg_reg.connect(MNI152_2mm, 'standard_file', resample_mni,
                             'in_file')

        wf_coreg_reg.connect(resample_mni, 'out_file', anat2std_reg,
                             'reference')

        wf_coreg_reg.connect(resample_anat, 'out_file', anat2std_reg,
                             'in_file')

        # Calculates the xformation matrix from anat 3mm to MNI 3mm

        # We can get those matrices by referring to func2anat_reg.outputs.out_matrix_file and similarly for anat2std_reg

        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file', concat_xform,
                             'in_file')

        wf_coreg_reg.connect(anat2std_reg, 'out_matrix_file', concat_xform,
                             'in_file2')

        wf_coreg_reg.connect(concat_xform, 'out_file', dataSink,
                             'tranformation_matrix_fun2std.@out_file')

        wf_coreg_reg.connect(concat_xform, 'out_file',
                             save_file_list_in_joint_xformation_matrix,
                             'in_joint_xformation_matrix')

        # Now inverse the func2std MAT to std2func
        wf_coreg_reg.connect(concat_xform, 'out_file', wf_atlas_resize_reg,
                             'inv_mat.in_file')
# ------------------------------------------------------------------------------------------------------------------------------

# Registration of Functional to MNI 3mm space w/o using anatomical
    if ANAT == 0:
        print('Not using anatomical high-resolution files')
        wf_coreg_reg.connect(MNI152_2mm, 'standard_file', resample_mni,
                             'in_file')
        wf_coreg_reg.connect(
            resample_mni, 'out_file', func2anat_reg, 'reference'
        )  # Make the resampled file as reference in func2anat_reg

        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file', dataSink,
                             'tranformation_matrix_fun2std.@out_file')

        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file',
                             save_file_list_in_joint_xformation_matrix,
                             'in_joint_xformation_matrix')

        # Now inverse the func2std MAT to std2func
        wf_coreg_reg.connect(func2anat_reg, 'out_matrix_file',
                             wf_atlas_resize_reg, 'inv_mat.in_file')

    # ## Co-Registration, Normalization and Bandpass Workflow
    # 1. Co-registration means aligning the func to the anat
    # 2. Normalization means aligning func/anat to standard
    # 3. Applied band pass filtering in the range highpass=0.008, lowpass=0.08

    # In[910]:

    wf_motion_correction_bet = Workflow(name=motion_correction_bet_directory)
    # wf_motion_correction_bet.base_dir = base_directory

    wf_motion_correction_bet.connect([
        (from_mcflirt, meanfunc, [('in_file', 'in_file')]),
        (meanfunc, meanfuncmask, [('out_file', 'in_file')]),
        (from_mcflirt, applyMask, [('in_file', 'in_file')]),  # 1
        (meanfuncmask, applyMask, [
            ('mask_file', 'in_file2')
        ]),  # 2 output: 1&2,  BET on coregistered fmri scan
        (meanfunc, maskfunc4mean, [('out_file', 'in_file')]),  # 3
        (meanfuncmask, maskfunc4mean,
         [('mask_file', 'in_file2')]),  # 4 output: 3&4, BET on mean func scan
        (applyMask, save_file_list_in_brain, [('out_file', 'in_brain')]),
        (applyMask, save_file_list_in_mask, [('out_file2', 'in_mask')]),
        (maskfunc4mean, wf_coreg_reg, [('out_file', 'func2anat_reg.in_file')])
    ])

    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")

    infosource.iterables = [('subject_id', subject_list)]

    # Create the workflow

    wf = Workflow(name=parent_wf_directory)
    # base_dir = opj(s,'result')
    wf.base_dir = base_directory  # Dir where all the outputs will be stored (inside the BETFlow folder).

    # wf.connect([      (infosource, BIDSDataGrabber, [('subject_id','subject_id')]),
    #                   (BIDSDataGrabber, extract, [('func_file_path','in_file')]),
    #
    #                   (BIDSDataGrabber,getMetadata, [('func_file_path','in_file')]),
    #
    #                   (getMetadata,slicetimer, [('tr','time_repetition')]),
    #
    #
    #                   (getMetadata,slicetimer, [('index_dir','index_dir')]),
    #
    #                   (getMetadata,slicetimer, [('interleaved','interleaved')]),
    #
    #                   (getMetadata,save_file_list_in_tr, [('tr','in_tr')]),
    #
    #                   (extract,slicetimer,[('roi_file','in_file')]),
    #
    #                   (slicetimer, mcflirt,[('slice_time_corrected_file','in_file')])
    #                   (mcflirt,dataSink,[('par_file','motion_params.@par_file')]), # saves the motion parameters calculated before
    #
    #                   (mcflirt,save_file_list_in_motion_params,[('par_file','in_motion_params')]),
    #
    #                   (mcflirt,wf_motion_correction_bet,[('out_file','from_mcflirt.in_file')])
    #            ])
    # # Run it in parallel
    # wf.run('MultiProc', plugin_args={'n_procs': num_proc})
    #
    #
    #
    # # Visualize the detailed graph
    # # from IPython.display import Image
    # wf.write_graph(graph2use='flat', format='png', simple_form=True)

    # Options:
    # discard 4 Volumes (extract), slicetimer, mcflirt
    print('Preprocessing Options:')
    print('Skipping 4 dummy volumes - ', options_binary_string[0])
    print('Slicetiming correction - ', options_binary_string[1])
    print('Finding Motion Outliers - ', options_binary_string[2])
    print('Doing Motion Correction - ', options_binary_string[3])

    # ANAT = 0
    nodes = [extract, slicetimer, motionOutliers, mcflirt]
    wf.connect(infosource, 'subject_id', BIDSDataGrabber, 'subject_id')
    wf.connect(BIDSDataGrabber, 'func_file_path', getMetadata, 'in_file')
    wf.connect(getMetadata, 'tr', save_file_list_in_tr, 'in_tr')

    old_node = BIDSDataGrabber
    old_node_output = 'func_file_path'
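
    # Chain the optional preprocessing nodes in order: each character of
    # options_binary_string gates one entry of `nodes` above, so e.g. '1111'
    # runs extract -> slicetimer -> mcflirt, with motionOutliers branching
    # off the chain rather than extending it. old_node / old_node_output
    # track the current tail of the chain so that each enabled node consumes
    # the previous node's output.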

    for idx, include in enumerate(options_binary_string):

        if old_node == extract:
            old_node_output = 'roi_file'
        elif old_node == slicetimer:
            old_node_output = 'slice_time_corrected_file'
        # elif old_node == mcflirt:

        # old_node_output = 'out_file'

        if int(include):
            new_node = nodes[idx]

            if new_node == slicetimer:
                wf.connect(getMetadata, 'tr', slicetimer, 'time_repetition')
                wf.connect(getMetadata, 'index_dir', slicetimer, 'index_dir')
                wf.connect(getMetadata, 'interleaved', slicetimer,
                           'interleaved')
                new_node_input = 'in_file'
            elif new_node == extract:
                new_node_input = 'in_file'
            elif new_node == mcflirt:
                new_node_input = 'in_file'
                wf.connect(mcflirt, 'par_file', dataSink,
                           'motion_params.@par_file'
                           )  # saves the motion parameters calculated before

                wf.connect(mcflirt, 'par_file',
                           save_file_list_in_motion_params, 'in_motion_params')

                wf.connect(mcflirt, 'out_file', wf_motion_correction_bet,
                           'from_mcflirt.in_file')

            elif new_node == motionOutliers:

                wf.connect(meanfuncmask, 'mask_file', motionOutliers, 'mask')

                wf.connect(motionOutliers, 'out_file', dataSink,
                           'motionOutliers.@out_file')

                wf.connect(motionOutliers, 'out_metric_plot', dataSink,
                           'motionOutliers.@out_metric_plot')

                wf.connect(motionOutliers, 'out_metric_values', dataSink,
                           'motionOutliers.@out_metric_values')

                wf.connect(motionOutliers, 'out_file',
                           save_file_list_in_motion_outliers,
                           'in_motion_outliers')

                new_node_input = 'in_file'

                wf.connect(old_node, old_node_output, new_node, new_node_input)

                continue

            wf.connect(old_node, old_node_output, new_node, new_node_input)

            old_node = new_node

        else:
            if idx == 3:
                # new_node = from_mcflirt
                # new_node_input = 'from_mcflirt.in_file'

                wf.connect(old_node, old_node_output, wf_motion_correction_bet,
                           'from_mcflirt.in_file')

                # old_node = new_node

    TEMP_DIR_FOR_STORAGE = opj(base_directory, 'crash_files')
    wf.config = {"execution": {"crashdump_dir": TEMP_DIR_FOR_STORAGE}}

    # Visualize the detailed graph
    # from IPython.display import Image

    wf.write_graph(graph2use='flat', format='png', simple_form=True)

    # Run it in parallel
    wf.run('MultiProc', plugin_args={'n_procs': num_proc})

def _main(subject_list, vols, subid_vol_dict, number_of_skipped_volumes,
          brain_path, mask_path, atlas_path, tr_path, motion_params_path,
          func2std_mat_path, MNI3mm_path, base_directory, fc_datasink_name,
          motion_param_regression, band_pass_filtering,
          global_signal_regression, smoothing, volcorrect, num_proc,
          functional_connectivity_directory):

    # ## Volume correction
    # * I have already extracted 4 volumes.
    # * Now extract 120 - 4 = 116 volumes from each subject
    # * So define vols = 114
    #

    if number_of_skipped_volumes is None:
        number_of_skipped_volumes = 4
    vols = vols - number_of_skipped_volumes

    def vol_correct(sub_id, subid_vol_dict, vols, number_of_skipped_volumes):
        sub_vols = subid_vol_dict[sub_id] - number_of_skipped_volumes
        if sub_vols > vols:
            t_min = sub_vols - vols
        elif sub_vols == vols:
            t_min = 0
        else:
            raise Exception('Volumes of Sub {} less than desired!'.format(sub_id))
        return int(t_min)
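
    # Quick illustration with hypothetical numbers: a subject acquired with 124
    # volumes, 4 skipped, target vols = 116 -> sub_vols = 120, t_min = 120 - 116 = 4,
    # i.e. the first 4 surplus volumes are trimmed by ExtractROI below.
    #     vol_correct('sub-01', {'sub-01': 124}, 116, 4)  # == 4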


    volCorrect = Node(Function(function=vol_correct, input_names=['sub_id','subid_vol_dict','vols','number_of_skipped_volumes'],
                                    output_names=['t_min']), name='volCorrect')

    volCorrect.inputs.subid_vol_dict = subid_vol_dict
    volCorrect.inputs.vols = vols
    volCorrect.inputs.number_of_skipped_volumes = number_of_skipped_volumes


    # ## Define a function to fetch the filenames of a particular subject ID



    def get_subject_filenames(subject_id, brain_path, mask_path, atlas_path, tr_path,
                              motion_params_path, func2std_mat_path, MNI3mm_path):
        import re
        from itertools import zip_longest
        # zip_longest pairs lists of unequal length without dropping elements.
        # Source: https://stackoverflow.com/questions/11318977/zipping-unequal-lists-in-python-in-to-a-list-which-does-not-drop-any-element-fro
        sub_id_extracted = None
        for brain, mask, atlas, tr, motion_param, func2std_mat in zip_longest(
                brain_path, mask_path, atlas_path, tr_path, motion_params_path, func2std_mat_path):
            print('*******************', brain, mask, atlas, tr, motion_param, func2std_mat)

            sub_id_extracted = re.search(r'.+_subject_id_(\d+)', brain).group(1)
            if str(subject_id) in brain:
                return brain, mask, atlas, tr, motion_param, func2std_mat, MNI3mm_path

        print('Unable to locate Subject: ', subject_id, 'extracted: ', sub_id_extracted)
        raise Exception('Unable to locate Subject: ', subject_id, 'extracted: ', sub_id_extracted)




    # Make a node
    getSubjectFilenames = Node(Function(function=get_subject_filenames, input_names=['subject_id','brain_path','mask_path','atlas_path','tr_path','motion_params_path','func2std_mat_path','MNI3mm_path'],
                                    output_names=['brain','mask','atlas','tr','motion_param','func2std_mat', 'MNI3mm_path']), name='getSubjectFilenames')


    getSubjectFilenames.inputs.brain_path = brain_path
    getSubjectFilenames.inputs.mask_path = mask_path
    getSubjectFilenames.inputs.atlas_path = atlas_path
    getSubjectFilenames.inputs.tr_path = tr_path
    getSubjectFilenames.inputs.motion_params_path = motion_params_path
    getSubjectFilenames.inputs.func2std_mat_path = func2std_mat_path
    getSubjectFilenames.inputs.MNI3mm_path = MNI3mm_path




    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")

    infosource.iterables = [('subject_id',subject_list)]



    # ## Band Pass Filtering
    # Band-pass filter the data using the code from
    # https://neurostars.org/t/bandpass-filtering-different-outputs-from-fsl-and-nipype-custom-function/824/2

    ### AFNI

    bandpass = Node(afni.Bandpass(highpass=0.01, lowpass=0.1,
                             despike=False, no_detrend=True, notrans=True,
                             outputtype='NIFTI_GZ'),name='bandpass')

    # bandpass = Node(afni.Bandpass(highpass=0.001, lowpass=0.01,
    #                          despike=False, no_detrend=True, notrans=True,
    #                          tr=2.0,outputtype='NIFTI_GZ'),name='bandpass')


    # ## Highpass filtering

    # Perform temporal highpass filtering on the data
    # https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dBandpass.html

    highpass = Node(afni.Bandpass(highpass=0.009, lowpass=99999,
                             despike=False, no_detrend=True, notrans=True,
                             outputtype='NIFTI_GZ'),name='highpass')

    #  FSL bandpass/Highpass
    # highpass = Node(interface=ImageMaths(suffix='_tempfilt'),
    #                   iterfield=['in_file'],
    #                   name='highpass')
    #
    # highpass.inputs.op_string = '-bptf 27.77775001525879  -1' # 23.64 # 31.25
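    # The FSL -bptf argument is the highpass sigma expressed in volumes:
    #     sigma_vols = 1 / (2 * cutoff_hz * TR)
    # e.g. for a 0.009 Hz cutoff and an assumed TR of 2 s:
    #     1 / (2 * 0.009 * 2) = 27.7778, which is where 27.77775... above comes from.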


    # ## Smoothing
    # ### Using 6mm FWHM
    # sigma = 6 / 2.3548 = 2.547987090198743
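    # FWHM-to-sigma conversion for a Gaussian kernel (sketch, plain numpy):
    #     import numpy as np
    #     sigma = 6 / (2 * np.sqrt(2 * np.log(2)))   # = 6 / 2.35482... ~= 2.5480
    # fslmaths' -s option takes this sigma in mm.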

    spatialSmooth = Node(interface=ImageMaths(op_string='-s 2.5479',
                                                suffix='_smoothed'),
                       name='spatialSmooth')


    # ## Perform the Gram-Schmidt process (for global signal removal)
    # https://en.wikipedia.org/wiki/Gram%E2%80%93Schmidt_process


    def orthogonalize(in_file, mask_file):
        import numpy as np
        import nibabel as nib
        import os
        from os.path import join as opj

        def gram_schmidt(voxel_time_series, mean_vector):
            numerator = np.dot(voxel_time_series, mean_vector)
            denominator = np.dot(mean_vector, mean_vector)
            voxel_time_series_orthogonalized = voxel_time_series - (numerator / denominator) * mean_vector

    #         TO CONFIRM THAT THE VECTORS ARE ORTHOGONAL:
    #         sum_dot_prod = np.sum(np.dot(voxel_time_series_orthogonalized, mean_vector))
    #         print('Sum of entries of orthogonalized vector = ', sum_dot_prod)
            return voxel_time_series_orthogonalized


        mask_data = nib.load(mask_file)
        mask = mask_data.get_data()

        brain_data = nib.load(in_file)
        brain = brain_data.get_data()

        x_dim, y_dim, z_dim, t_dim = brain_data.shape



        # Find the mean time series over all brain voxels

        mean_vector = np.zeros(t_dim)
        num_brain_voxels = 0

        # Sum the time series over brain voxels (and count them)
        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        mean_vector = mean_vector + brain[i,j,k,:]
                        num_brain_voxels = num_brain_voxels + 1


        mean_vector = mean_vector / num_brain_voxels

        # Orthogonalize
        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        brain[i,j,k,:] = gram_schmidt(brain[i,j,k,:], mean_vector)



        sub_id = in_file.split('/')[-1].split('.')[0].split('_')[0].split('-')[1]

        gsr_file_name = 'sub-' + sub_id + '_task-rest_run-1_bold.nii.gz'

    #     gsr_file_name_nii = gsr_file_name + '.nii.gz'

        out_file = opj(os.getcwd(),gsr_file_name) # path

        brain_with_header = nib.Nifti1Image(brain, affine=brain_data.affine,header = brain_data.header)
        nib.save(brain_with_header,gsr_file_name)

        return out_file
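
    # Standalone sanity check of the Gram-Schmidt step on toy data
    # (hypothetical numbers, independent of the workflow):
    #     import numpy as np
    #     ts = np.array([1.0, 2.0, 3.0])
    #     mean = np.array([1.0, 1.0, 1.0])
    #     ortho = ts - (np.dot(ts, mean) / np.dot(mean, mean)) * mean   # -> [-1., 0., 1.]
    #     assert abs(np.dot(ortho, mean)) < 1e-10  # residual is orthogonal to the mean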










    globalSignalRemoval = Node(Function(function=orthogonalize, input_names=['in_file','mask_file'],
                                      output_names=['out_file']), name='globalSignalRemoval' )
    # globalSignalRemoval.inputs.mask_file = mask_file
    # globalSignalRemoval.iterables = [('in_file',file_paths)]


    # ## GLM for regression of motion parameters



    def calc_residuals(in_file,
                       motion_file):
        """
        Regresses out the nuisance (motion) parameters from every voxel's time
        series using a GLM and returns the residuals.

        Parameters
        ----------
        in_file : string
            Path of a subject's motion-corrected nifti file.
        motion_file : string
            Path of the subject's motion parameters.

        Returns
        -------
        out_file : string
            Path of the residual file in nifti format.

        """
        import nibabel as nb
        import numpy as np
        import os
        from os.path import join as opj
        nii = nb.load(in_file)
        data = nii.get_data().astype(np.float32)
        global_mask = (data != 0).sum(-1) != 0


        # Check and define regressors which are provided from files
        if motion_file is not None:
            motion = np.genfromtxt(motion_file)
            if motion.size == 0:
                raise ValueError('Motion signal file {0} is '
                                 'empty'.format(motion_file))
            if motion.shape[0] != data.shape[3]:
                raise ValueError('Motion parameters {0} do not match data '
                                 'timepoints {1}'.format(motion.shape[0],
                                                         data.shape[3]))

        # Calculate regressors
        regressor_map = {'constant' : np.ones((data.shape[3],1))}

        regressor_map['motion'] = motion


        X = np.zeros((data.shape[3], 1))

        for rname, rval in regressor_map.items():
            X = np.hstack((X, rval.reshape(rval.shape[0],-1)))

        X = X[:,1:]

        if np.isnan(X).any():
            raise ValueError('Regressor file contains NaN')

        Y = data[global_mask].T

        try:
            B = np.linalg.inv(X.T.dot(X)).dot(X.T).dot(Y)
        except np.linalg.LinAlgError as e:
            if "Singular matrix" in str(e):
                raise Exception("Error details: {0}\n\nSingular matrix error: "
                                "The nuisance regression configuration you "
                                "selected may have been too stringent, and the "
                                "regression could not be completed. Ensure your "
                                "parameters are not too "
                                "extreme.\n\n".format(e))
            else:
                raise Exception("Error details: {0}\n\nSomething went wrong with "
                                "nuisance regression.\n\n".format(e))

        Y_res = Y - X.dot(B)

        data[global_mask] = Y_res.T

        img = nb.Nifti1Image(data, header=nii.get_header(),
                             affine=nii.get_affine())

        subject_name = in_file.split('/')[-1].split('.')[0]
        filename = subject_name + '_residual.nii.gz'
        out_file = os.path.join(os.getcwd(),filename )
        img.to_filename(out_file) # alt to nib.save

        return out_file




    # Create a Node for above
    calc_residuals = Node(Function(function=calc_residuals, input_names=['in_file','motion_file'],
                                    output_names=['out_file']), name='calc_residuals')
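
    # Minimal standalone sketch of the same residualization on toy data
    # (hypothetical arrays; np.linalg.lstsq is a numerically safer alternative
    # to forming inv(X'X) explicitly):
    #     import numpy as np
    #     rng = np.random.default_rng(0)
    #     X = np.column_stack([np.ones(100), rng.standard_normal((100, 6))])  # constant + 6 motion params
    #     Y = rng.standard_normal((100, 500))                                 # 500 voxel time series
    #     B = np.linalg.lstsq(X, Y, rcond=None)[0]
    #     residuals = Y - X @ B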


    # ## Datasink
    # I needed to define the structure of what files are saved and where.



    # Create DataSink object
    dataSink = Node(DataSink(), name='datasink')

    # Name of the output folder
    dataSink.inputs.base_directory = opj(base_directory,fc_datasink_name)




    # To create the substitutions I looked at the `datasink` folder where I was
    # redirecting the output, manually selected the part of the file/folder name
    # that I wanted to change, and copied it below to be substituted.


    # Define substitution strings so that the data is similar to BIDS
    substitutions = [('_subject_id_', 'sub-')]

    # Feed the substitution strings to the DataSink node
    dataSink.inputs.substitutions = substitutions
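
    # Effect of the substitution on a datasink path (hypothetical subject id):
    #     .../_subject_id_0050952/...  ->  .../sub-0050952/...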



    # ### Following is a Join Node that collects the preprocessed file paths and saves them in a file



    def save_file_list_function(in_fc_map_brain_file):
        # Imports
        import numpy as np
        import os
        from os.path import join as opj


        file_list = np.asarray(in_fc_map_brain_file)
        print('######################## File List ######################: \n',file_list)

        np.save('fc_map_brain_file_list',file_list)
        file_name = 'fc_map_brain_file_list.npy'
        out_fc_map_brain_file = opj(os.getcwd(), file_name)  # path

        return out_fc_map_brain_file





    save_file_list = JoinNode(Function(function=save_file_list_function, input_names=['in_fc_map_brain_file'],
                     output_names=['out_fc_map_brain_file']),
                     joinsource="infosource",
                     joinfield=['in_fc_map_brain_file'],
                     name="save_file_list")


    # ## Create a FC node
    #
    # This node:
    # 1. Extracts the average time series of the brain ROIs using the atlas and
    #    stores it as a matrix of size [ROIs x Volumes].
    # 2. Extracts the voxel time series and stores it as a matrix of size
    #    [Voxels x Volumes].
    #


    # And save  FC matrix files in shape of brains
    def pear_coff(in_file, atlas_file, mask_file):
        # use the mask to find how many voxels are in the brain region

        # imports
        import numpy as np
        import nibabel as nib
        import os
        from os.path import join as opj
        mask_data = nib.load(mask_file)
        mask = mask_data.get_data()

        x_dim, y_dim, z_dim = mask_data.shape


        atlasPath = atlas_file
        # Read the atlas
        atlasObject = nib.load(atlasPath)
        atlas = atlasObject.get_data()

        num_ROIs = int((np.max(atlas) - np.min(atlas) ))
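        # (assumes ROI labels are consecutive integers from min to max,
        #  with the minimum label, typically 0, being background)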


        # Read the brain in_file

        brain_data = nib.load(in_file)
        brain = brain_data.get_data()

        x_dim, y_dim, z_dim, num_volumes = brain.shape


        num_brain_voxels = 0

        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        num_brain_voxels = num_brain_voxels + 1

        # Initialize a matrix of ROI time series and voxel time series

        ROI_matrix = np.zeros((num_ROIs, num_volumes))
        voxel_matrix = np.zeros((num_brain_voxels, num_volumes))

        # Fill up the voxel_matrix

        voxel_counter = 0
        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    if mask[i,j,k] == 1:
                        voxel_matrix[voxel_counter,:] = brain[i,j,k,:]
                        voxel_counter = voxel_counter + 1


        # Fill up the ROI_matrix
        # Keep track of number of voxels per ROI as well by using an array - num_voxels_in_ROI[]

        num_voxels_in_ROI = np.zeros((num_ROIs,1)) # A column array containing the number of voxels in each ROI

        for i in range(x_dim):
            for j in range(y_dim):
                for k in range(z_dim):
                    label = int(atlas[i,j,k]) - 1
                    if label != -1:
                        ROI_matrix[label,:] = np.add(ROI_matrix[label,:], brain[i,j,k,:])
                        num_voxels_in_ROI[label,0] = num_voxels_in_ROI[label,0] + 1

        ROI_matrix = np.divide(ROI_matrix,num_voxels_in_ROI) # element-wise mean; an ROI with zero voxels yields NaN here (handled below)

        X, Y = ROI_matrix, voxel_matrix


        # Subtract mean from X and Y

        X = np.subtract(X, np.mean(X, axis=1, keepdims=True))
        Y = np.subtract(Y, np.mean(Y, axis=1, keepdims=True))

        temp1 = np.dot(X,Y.T)
        temp2 = np.sqrt(np.sum(np.multiply(X,X), axis=1, keepdims=True))
        temp3 = np.sqrt(np.sum(np.multiply(Y,Y), axis=1, keepdims=True))
        temp4 = np.dot(temp2,temp3.T)
        coff_matrix = np.divide(temp1, (temp4 + 1e-7))


        # Check if any ROI is missing and replace the NAN values in coff_matrix by 0
        if np.argwhere(np.isnan(coff_matrix)).shape[0] != 0:
            print("Some ROIs are not present. Replacing NAN in coff matrix by 0")
            np.nan_to_num(coff_matrix, copy=False)

        # TODO: the 1e-7 in the denominator guards the correlation itself; the NaNs
        # most likely come from ROIs with no voxels (0/0 in the ROI averaging above).
        sub_id = in_file.split('/')[-1].split('.')[0].split('_')[0].split('-')[1]


        fc_file_name = sub_id + '_fc_map'

        print ("Pear Matrix calculated for subject: ",sub_id)

        roi_brain_matrix = coff_matrix
        brain_file = in_file


        x_dim, y_dim, z_dim, t_dim = brain.shape

        (brain_data.header).set_data_shape([x_dim,y_dim,z_dim,num_ROIs])

        brain_roi_tensor = np.zeros((brain_data.header.get_data_shape()))

        print("Creating brain for Subject-",sub_id)
        for roi in range(num_ROIs):
            brain_voxel_counter = 0
            for i in range(x_dim):
                for j in range(y_dim):
                    for k in range(z_dim):
                        if mask[i,j,k] == 1:
                            brain_roi_tensor[i,j,k,roi] = roi_brain_matrix[roi,brain_voxel_counter]
                            brain_voxel_counter = brain_voxel_counter + 1


            assert (brain_voxel_counter == len(roi_brain_matrix[roi,:]))
        print("Created brain for Subject-",sub_id)


        path = os.getcwd()
        fc_file_name = fc_file_name + '.nii.gz'
        out_file = opj(path,fc_file_name)

        brain_with_header = nib.Nifti1Image(brain_roi_tensor, affine=brain_data.affine,header = brain_data.header)
        nib.save(brain_with_header,out_file)


        fc_map_brain_file = out_file
        return fc_map_brain_file





    # Again Create the Node and set default values to paths

    pearcoff = Node(Function(function=pear_coff, input_names=['in_file','atlas_file','mask_file'],
                                    output_names=['fc_map_brain_file']), name='pearcoff')
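
    # Standalone check that the vectorized computation in pear_coff matches
    # np.corrcoef on toy data (hypothetical arrays):
    #     import numpy as np
    #     rng = np.random.default_rng(1)
    #     X = rng.standard_normal((5, 50))   # 5 ROI time series
    #     Y = rng.standard_normal((7, 50))   # 7 voxel time series
    #     Xc = X - X.mean(axis=1, keepdims=True)
    #     Yc = Y - Y.mean(axis=1, keepdims=True)
    #     den = np.sqrt((Xc**2).sum(1, keepdims=True)) @ np.sqrt((Yc**2).sum(1, keepdims=True)).T
    #     r = (Xc @ Yc.T) / den
    #     assert np.allclose(r, np.corrcoef(X, Y)[:5, 5:])  # cross-block of the full corr matrix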



    # # IMPORTANT:
    # * ROI 255 has been removed due to resampling, so the FC maps will have NaN
    #   at that row. Don't use that ROI :)
    # * I found out because I kept getting this error: RuntimeWarning: invalid
    #   value encountered in true_divide.
    # * To debug it, I read the coff matrix and checked its diagonal to discover
    #   the NaN value.
    #

    # ## Extract volumes




    # ExtractROI - For volCorrect
    extract = Node(ExtractROI(t_size=-1, output_type='NIFTI'),
                   name="extract")



    # ### Node for applying the transformation matrix to the functional data


    func2std_xform = Node(FLIRT(output_type='NIFTI_GZ',
                             apply_xfm=True), name="func2std_xform")





    # motion_param_regression = 1
    # band_pass_filtering = 0
    # global_signal_regression = 0
    # smoothing = 1
    # volcorrect = 1
    if num_proc is None:
        num_proc = 7

    combination = 'motionRegress' + str(int(motion_param_regression)) + \
     'global' + str(int(global_signal_regression)) + 'smoothing' + str(int(smoothing)) +\
     'filt' + str(int(band_pass_filtering))

    print("Combination: ",combination)

    binary_string = str(int(motion_param_regression)) + str(int(global_signal_regression)) + \
    str(int(smoothing)) + str(int(band_pass_filtering)) + str(int(volcorrect))
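
    # Example with hypothetical flags motion=1, gsr=0, smoothing=1, filt=1, volcorrect=1:
    #     combination   -> 'motionRegress1global0smoothing1filt1'
    #     binary_string -> '10111'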

    base_dir = opj(base_directory,functional_connectivity_directory)
    # wf = Workflow(name=functional_connectivity_directory)
    wf = Workflow(name=combination)

    wf.base_dir = base_dir # Dir where all the outputs will be stored.

    wf.connect(infosource ,'subject_id', getSubjectFilenames, 'subject_id')


    # ------- Dynamic Pipeline ------------------------


    nodes = [
        calc_residuals,
        globalSignalRemoval,
        spatialSmooth,
        bandpass,
        volCorrect]


    # from nipype.interfaces import fsl

    old_node = getSubjectFilenames
    old_node_output = 'brain'

    binary_string = binary_string+'0' # so that the loop runs one more time
    for idx, include in enumerate(binary_string):
        # binary_string bit order (e.g. '11111'):
        #   [0] motion_param_regression
        #   [1] global_signal_regression
        #   [2] smoothing
        #   [3] band_pass_filtering
        #   [4] volcorrect

        if old_node == calc_residuals:
            old_node_output = 'out_file'
        elif old_node == extract:
            old_node_output = 'roi_file'
        elif old_node == globalSignalRemoval:
            old_node_output = 'out_file'
        elif old_node == bandpass:
            old_node_output = 'out_file'
        elif old_node == highpass:
            old_node_output = 'out_file'
        elif old_node == spatialSmooth:
            old_node_output = 'out_file'
        elif old_node == volCorrect:
            old_node_output = 'out_file'


        if int(include):
            # if old_node is None:
            #
            #     wf.add_nodes([nodes[idx]])
            #
            # else:



            new_node = nodes[idx]


            if new_node == calc_residuals:
                wf.connect([(getSubjectFilenames, calc_residuals, [('motion_param', 'motion_file')])])
                new_node_input = 'in_file'

            elif new_node == extract:
                wf.connect([( volCorrect, extract, [('t_min','t_min')])])
                new_node_input = 'in_file'

            elif new_node == globalSignalRemoval:
                wf.connect([(getSubjectFilenames, globalSignalRemoval, [('mask','mask_file')])])
                new_node_input = 'in_file'

            elif new_node == bandpass:
                wf.connect([(getSubjectFilenames, bandpass, [('tr','tr')])])
                new_node_input = 'in_file'

            elif new_node == highpass:
                wf.connect([(getSubjectFilenames, highpass, [('tr','tr')])]) # comment this out if using the FSL highpass alternative above
                new_node_input = 'in_file'

            elif new_node == spatialSmooth:
                new_node_input = 'in_file'

            elif new_node == volCorrect:
                wf.connect([(infosource, volCorrect, [('subject_id','sub_id')])])
                wf.connect([( volCorrect, extract, [('t_min','t_min')])])
                new_node = extract
                new_node_input = 'in_file'


            wf.connect(old_node, old_node_output, new_node, new_node_input)

            old_node = new_node


        else:
            if idx == 3: # band_pass_filtering == 0 => use highpass instead
                new_node = highpass
                wf.connect([(getSubjectFilenames, highpass, [('tr','tr')])]) # comment this out if using the FSL highpass alternative above
                new_node_input = 'in_file'

                wf.connect(old_node, old_node_output, new_node, new_node_input)

                old_node = new_node

    wf.connect(old_node, old_node_output, pearcoff, 'in_file')
    wf.connect(getSubjectFilenames,'atlas', pearcoff, 'atlas_file')
    wf.connect(getSubjectFilenames, 'mask', pearcoff, 'mask_file')

    wf.connect(pearcoff, 'fc_map_brain_file', func2std_xform ,'in_file')
    wf.connect(getSubjectFilenames,'func2std_mat', func2std_xform, 'in_matrix_file')
    wf.connect(getSubjectFilenames, 'MNI3mm_path', func2std_xform,'reference')

    folder_name = combination + '.@fc_map_brain_file'
    wf.connect(func2std_xform, 'out_file', save_file_list, 'in_fc_map_brain_file')
    wf.connect(save_file_list, 'out_fc_map_brain_file', dataSink, folder_name)


    TEMP_DIR_FOR_STORAGE = opj(base_directory,'crash_files')
    wf.config = {"execution": {"crashdump_dir": TEMP_DIR_FOR_STORAGE}}

    wf.write_graph(graph2use='flat', format='png')
    wf.run('MultiProc', plugin_args={'n_procs': num_proc})
Exemple #9
0
def canonical(
    subjects_participants,
    regdir,
    f2s,
    template="~/GitHub/mriPipeline/templates/waxholm/WHS_SD_rat_T2star_v1.01_downsample3.nii.gz",
    f_file_format="~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_SE_EPI/f_bru2nii/",
    s_file_format="~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_T2_TurboRARE/s_bru2nii/",
):
    """Warp a functional image based on the functional-to-structural and the structural-to-template registrations.
	Currently this approach is failing because the functiona-to-structural registration pushes the brain stem too far down.
	This may be

	"""
    template = os.path.expanduser(template)
    for subject_participant in subjects_participants:
        # 'participant' and 'i' are used in the output filenames below but were
        # never defined in the original snippet; deriving them from the
        # subject/session dict is an assumption based on the file-format
        # templates above.
        participant = subject_participant['subject']
        i = subject_participant['session']
        func_image_dir = os.path.expanduser(
            f_file_format.format(**subject_participant))
        struct_image_dir = os.path.expanduser(
            s_file_format.format(**subject_participant))
        try:
            for myfile in os.listdir(func_image_dir):
                if myfile.endswith((".nii.gz", ".nii")):
                    func_image = os.path.join(func_image_dir, myfile)
            for myfile in os.listdir(struct_image_dir):
                if myfile.endswith((".nii.gz", ".nii")):
                    struct_image = os.path.join(struct_image_dir, myfile)
        except FileNotFoundError:
            pass
        else:
            #struct
            n4 = ants.N4BiasFieldCorrection()
            n4.inputs.dimension = 3
            n4.inputs.input_image = struct_image
            # the correction introduces a bias (along the z-axis) if the following value is set below 85. This is likely contingent on resolution.
            n4.inputs.bspline_fitting_distance = 100
            n4.inputs.shrink_factor = 2
            n4.inputs.n_iterations = [200, 200, 200, 200]
            n4.inputs.convergence_threshold = 1e-11
            n4.inputs.output_image = '{}/ss_n4_{}_ofM{}.nii.gz'.format(
                regdir, participant, i)
            n4_res = n4.run()

            _n4 = ants.N4BiasFieldCorrection()
            _n4.inputs.dimension = 3
            _n4.inputs.input_image = struct_image
            # the correction introduces a bias (along the z-axis) if the following value is set below 85. This is likely contingent on resolution.
            _n4.inputs.bspline_fitting_distance = 95
            _n4.inputs.shrink_factor = 2
            _n4.inputs.n_iterations = [500, 500, 500, 500]
            _n4.inputs.convergence_threshold = 1e-14
            _n4.inputs.output_image = '{}/ss__n4_{}_ofM{}.nii.gz'.format(
                regdir, participant, i)
            _n4_res = _n4.run()

            # Thresholding is done on a separate, more aggressively bias-corrected image
            # to remove hyperintensities, so that brain regions are not caught by the negative threshold.
            struct_cutoff = ImageMaths()
            struct_cutoff.inputs.op_string = "-thrP 20 -uthrP 98"
            struct_cutoff.inputs.in_file = _n4_res.outputs.output_image
            struct_cutoff_res = struct_cutoff.run()

            struct_BET = BET()
            struct_BET.inputs.mask = True
            struct_BET.inputs.frac = 0.3
            struct_BET.inputs.robust = True
            struct_BET.inputs.in_file = struct_cutoff_res.outputs.out_file
            struct_BET_res = struct_BET.run()

            struct_mask = ApplyMask()
            struct_mask.inputs.in_file = n4_res.outputs.output_image
            struct_mask.inputs.mask_file = struct_BET_res.outputs.mask_file
            struct_mask_res = struct_mask.run()

            struct_registration = ants.Registration()
            struct_registration.inputs.fixed_image = template
            struct_registration.inputs.output_transform_prefix = "output_"
            struct_registration.inputs.transforms = ['Affine', 'SyN']  ##
            struct_registration.inputs.transform_parameters = [(1.0, ),
                                                               (1.0, 3.0, 5.0)
                                                               ]  ##
            struct_registration.inputs.number_of_iterations = [[
                2000, 1000, 500
            ], [100, 100, 100]]  #
            struct_registration.inputs.dimension = 3
            struct_registration.inputs.write_composite_transform = True
            struct_registration.inputs.collapse_output_transforms = True
            struct_registration.inputs.initial_moving_transform_com = True
            # Tested on Affine transform: CC takes too long; Demons does not tilt, but moves the slices too far caudally; GC tilts too much on
            struct_registration.inputs.metric = ['MeanSquares', 'Mattes']
            struct_registration.inputs.metric_weight = [1, 1]
            struct_registration.inputs.radius_or_number_of_bins = [16, 32]  #
            struct_registration.inputs.sampling_strategy = ['Random', None]
            struct_registration.inputs.sampling_percentage = [0.3, 0.3]
            struct_registration.inputs.convergence_threshold = [1.e-11,
                                                                1.e-8]  #
            struct_registration.inputs.convergence_window_size = [20, 20]
            struct_registration.inputs.smoothing_sigmas = [[4, 2, 1],
                                                           [4, 2, 1]]
            struct_registration.inputs.sigma_units = ['vox', 'vox']
            struct_registration.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]]
            struct_registration.inputs.use_estimate_learning_rate_once = [
                True, True
            ]
            # if the fixed_image is not acquired similarly to the moving_image (e.g. RARE to histological (e.g. AMBMC)) this should be False
            struct_registration.inputs.use_histogram_matching = [False, False]
            struct_registration.inputs.winsorize_lower_quantile = 0.005
            struct_registration.inputs.winsorize_upper_quantile = 0.98
            struct_registration.inputs.args = '--float'
            struct_registration.inputs.num_threads = 6

            struct_registration.inputs.moving_image = struct_mask_res.outputs.out_file
            struct_registration.inputs.output_warped_image = '{}/s_{}_ofM{}.nii.gz'.format(
                regdir, participant, i)
            struct_registration_res = struct_registration.run()

            #func
            func_n4 = ants.N4BiasFieldCorrection()
            func_n4.inputs.dimension = 3
            func_n4.inputs.input_image = func_image
            func_n4.inputs.bspline_fitting_distance = 100
            func_n4.inputs.shrink_factor = 2
            func_n4.inputs.n_iterations = [200, 200, 200, 200]
            func_n4.inputs.convergence_threshold = 1e-11
            func_n4.inputs.output_image = '{}/f_n4_{}_ofM{}.nii.gz'.format(
                regdir, participant, i)
            func_n4_res = func_n4.run()

            func_registration = ants.Registration()
            func_registration.inputs.fixed_image = n4_res.outputs.output_image
            func_registration.inputs.output_transform_prefix = "func_"
            func_registration.inputs.transforms = [f2s]
            func_registration.inputs.transform_parameters = [(0.1, )]
            func_registration.inputs.number_of_iterations = [[40, 20, 10]]
            func_registration.inputs.dimension = 3
            func_registration.inputs.write_composite_transform = True
            func_registration.inputs.collapse_output_transforms = True
            func_registration.inputs.initial_moving_transform_com = True
            func_registration.inputs.metric = ['MeanSquares']
            func_registration.inputs.metric_weight = [1]
            func_registration.inputs.radius_or_number_of_bins = [16]
            func_registration.inputs.sampling_strategy = ["Regular"]
            func_registration.inputs.sampling_percentage = [0.3]
            func_registration.inputs.convergence_threshold = [1.e-2]
            func_registration.inputs.convergence_window_size = [8]
            func_registration.inputs.smoothing_sigmas = [[4, 2,
                                                          1]]  # [1,0.5,0]
            func_registration.inputs.sigma_units = ['vox']
            func_registration.inputs.shrink_factors = [[3, 2, 1]]
            func_registration.inputs.use_estimate_learning_rate_once = [True]
            func_registration.inputs.use_histogram_matching = [False]
            func_registration.inputs.winsorize_lower_quantile = 0.005
            func_registration.inputs.winsorize_upper_quantile = 0.995
            func_registration.inputs.args = '--float'
            func_registration.inputs.num_threads = 6

            func_registration.inputs.moving_image = func_n4_res.outputs.output_image
            func_registration.inputs.output_warped_image = '{}/f_{}_ofM{}.nii.gz'.format(
                regdir, participant, i)
            func_registration_res = func_registration.run()

            warp = ants.ApplyTransforms()
            warp.inputs.reference_image = template
            warp.inputs.input_image_type = 3
            warp.inputs.interpolation = 'Linear'
            warp.inputs.invert_transform_flags = [False, False]
            warp.inputs.terminal_output = 'file'
            warp.inputs.output_image = '{}/{}_ofM{}.nii.gz'.format(
                regdir, participant, i)
            warp.num_threads = 6

            warp.inputs.input_image = func_image
            warp.inputs.transforms = [
                func_registration_res.outputs.composite_transform,
                struct_registration_res.outputs.composite_transform
            ]
            warp.run()
Exemple #10
0
def functional_per_participant_test():
    for i in ["", "_aF", "_cF1", "_cF2", "_pF"]:
        template = "~/ni_data/templates/ds_QBI_chr.nii.gz"
        participant = "4008"
        image_dir = "~/ni_data/ofM.dr/preprocessing/generic_work/_subject_session_{}.ofM{}/_scan_type_7_EPI_CBV/temporal_mean/".format(
            participant, i)
        try:
            for myfile in os.listdir(image_dir):
                if myfile.endswith(".nii.gz"):
                    mimage = os.path.join(image_dir, myfile)
        except FileNotFoundError:
            pass
        else:
            n4 = ants.N4BiasFieldCorrection()
            n4.inputs.dimension = 3
            n4.inputs.input_image = mimage
            n4.inputs.bspline_fitting_distance = 100
            n4.inputs.shrink_factor = 2
            n4.inputs.n_iterations = [200, 200, 200, 200]
            n4.inputs.convergence_threshold = 1e-11
            n4.inputs.output_image = 'n4_{}_ofM{}.nii.gz'.format(
                participant, i)
            n4_res = n4.run()

            functional_cutoff = ImageMaths()
            functional_cutoff.inputs.op_string = "-thrP 30"
            functional_cutoff.inputs.in_file = n4_res.outputs.output_image
            functional_cutoff_res = functional_cutoff.run()

            functional_BET = BET()
            functional_BET.inputs.mask = True
            functional_BET.inputs.frac = 0.5
            functional_BET.inputs.in_file = functional_cutoff_res.outputs.out_file
            functional_BET_res = functional_BET.run()

            registration = ants.Registration()
            registration.inputs.fixed_image = template
            registration.inputs.output_transform_prefix = "output_"
            registration.inputs.transforms = ['Affine', 'SyN']
            registration.inputs.transform_parameters = [(0.1, ),
                                                        (3.0, 3.0, 5.0)]
            registration.inputs.number_of_iterations = [[10000, 10000, 10000],
                                                        [100, 100, 100]]
            registration.inputs.dimension = 3
            registration.inputs.write_composite_transform = True
            registration.inputs.collapse_output_transforms = True
            registration.inputs.initial_moving_transform_com = True
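            # NOTE: the per-stage lists below are sized for three stages
            # (['Mattes'] * 2 + [...]), while only two transforms are declared
            # above; this looks like a truncation in the original snippet.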
            registration.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
            registration.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
            registration.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
            registration.inputs.sampling_strategy = ['Regular'] * 2 + [[
                None, None
            ]]
            registration.inputs.sampling_percentage = [0.3] * 2 + [[
                None, None
            ]]
            registration.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
            registration.inputs.convergence_window_size = [20] * 2 + [5]
            registration.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[
                1, 0.5, 0
            ]]
            registration.inputs.sigma_units = ['vox'] * 3
            registration.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
            registration.inputs.use_estimate_learning_rate_once = [True] * 3
            registration.inputs.use_histogram_matching = [False] * 2 + [True]
            registration.inputs.winsorize_lower_quantile = 0.005
            registration.inputs.winsorize_upper_quantile = 0.995
            registration.inputs.args = '--float'
            registration.inputs.num_threads = 4
            registration.plugin_args = {
                'qsub_args': '-pe orte 4',
                'sbatch_args': '--mem=6G -c 4'
            }

            registration.inputs.moving_image = functional_BET_res.outputs.out_file
            registration.inputs.output_warped_image = '{}_ofM{}.nii.gz'.format(
                participant, i)
            res = registration.run()
Exemple #11
0
def get_wf_tissue_priors(name='wf_tissue_priors3'):
    '''
    Returns a workflow that transforms the tissue priors to functional space and
    thresholds them at 0.5 (the resampling variant is kept below, disabled).
    '''
    # csf_tissue_prior_path, gm_tissue_prior_path, wm_tissue_prior_path,
    # threshold = 0.5

    wf_tissue_priors = Workflow(name=name)

    inputspec = Node(IdentityInterface(fields=['csf_tissue_prior_path',  'wm_tissue_prior_path',
                                 'threshold','std2func_mat_path', 'reference_func_file_path']),
                      name="inputspec")
    '''
    # 'gm_tissue_prior_path',

    resample_tissue_prior_csf = Node(Resample(voxel_size=(3, 3, 3), resample_mode='Cu', # cubic interpolation
                             outputtype='NIFTI'),
                    name="resample_tissue_prior_csf")



    # resample_tissue_prior_gm = Node(Resample(voxel_size=(3, 3, 3), resample_mode='Cu', # cubic interpolation
    #                          outputtype='NIFTI'),
    #                 name="resample_tissue_prior_gm")



    resample_tissue_prior_wm = Node(Resample(voxel_size=(3, 3, 3), resample_mode='Cu', # cubic interpolation
                             outputtype='NIFTI'),
                    name="resample_tissue_prior_wm")


    wf_tissue_priors.connect(inputspec, 'csf_tissue_prior_path', resample_tissue_prior_csf, 'in_file' )
    # wf_tissue_priors.connect(inputspec, 'gm_tissue_prior_path', resample_tissue_prior_gm, 'in_file' )
    wf_tissue_priors.connect(inputspec, 'wm_tissue_prior_path', resample_tissue_prior_wm, 'in_file' )
    '''

    # #  Invert the func2anat matrix to get anat2func
    # inv_mat = Node(ConvertXFM(invert_xfm=True), name='inv_mat')
    # wf_tissue_priors.connect(inputspec, 'func2anat_mat_path', inv_mat, 'in_file')

    # Transform the  tissue priors to the functional space using the inverse matrix
    std2func_xform_csf_prior = Node(FLIRT(output_type='NIFTI',
                             apply_xfm=True, interp='sinc'), name='std2func_xform_csf_prior')

    wf_tissue_priors.connect(inputspec, 'reference_func_file_path', std2func_xform_csf_prior, 'reference')
    wf_tissue_priors.connect(inputspec, 'std2func_mat_path', std2func_xform_csf_prior, 'in_matrix_file')

    std2func_xform_wm_prior = Node(FLIRT(output_type='NIFTI',
                                apply_xfm=True, interp='sinc'), name='std2func_xform_wm_prior')
    wf_tissue_priors.connect(inputspec, 'reference_func_file_path', std2func_xform_wm_prior, 'reference')
    wf_tissue_priors.connect(inputspec, 'std2func_mat_path', std2func_xform_wm_prior, 'in_matrix_file')

    # Transformed the priors
    #  Get the input in_file(s) of the std2func_xform_csf and std2func_xform_wm from the old workspace
    wf_tissue_priors.connect(inputspec, 'csf_tissue_prior_path', std2func_xform_csf_prior, 'in_file')
    wf_tissue_priors.connect(inputspec, 'wm_tissue_prior_path', std2func_xform_wm_prior, 'in_file')





    # Threshold

    def get_opstring(threshold, tissue_type):
        if tissue_type == 'csf':
            max = 216  #  216 is the highest intensity of the resampled afni output for CSF
        elif tissue_type == 'wm':
            max = 253 #  253 is the highest intensity of the resampled afni output for WM

        threshold = int(threshold * max)
        op = '-thr '+str(threshold)+' -bin'
        return op
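
    # Worked examples:
    #     get_opstring(0.5, 'csf') -> '-thr 108 -bin'   (int(0.5 * 216) = 108)
    #     get_opstring(0.5, 'wm')  -> '-thr 126 -bin'   (int(0.5 * 253) = 126)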

    # ----- CSF ------

    threshold_csf = Node(interface=ImageMaths(suffix='_thresh'),
                       name='threshold_csf')



    wf_tissue_priors.connect(inputspec, ('threshold', get_opstring, 'csf'), threshold_csf, 'op_string' )
    wf_tissue_priors.connect(std2func_xform_csf_prior, 'out_file', threshold_csf, 'in_file')

    # ------- GM --------

    # threshold_gm = Node(interface=ImageMaths(suffix='_thresh'),
    #                    name='threshold_gm')


    # wf_tissue_priors.connect(inputspec, ('threshold', get_opstring), threshold_gm, 'op_string' )
    # wf_tissue_priors.connect(resample_tissue_prior_gm, 'out_file', threshold_gm, 'in_file')

    # -------- WM --------

    threshold_wm = Node(interface=ImageMaths(suffix='_thresh'),
                       name='threshold_wm')

    wf_tissue_priors.connect(inputspec, ('threshold', get_opstring, 'wm'), threshold_wm, 'op_string' )
    wf_tissue_priors.connect(std2func_xform_wm_prior, 'out_file', threshold_wm, 'in_file')

    #  -------------------




    outputspec = Node(IdentityInterface(fields=['csf_tissue_prior_path', 'wm_tissue_prior_path', 'threshold']),
                      name="outputspec")

    # , 'gm_tissue_prior_path'
    wf_tissue_priors.connect(threshold_csf, 'out_file', outputspec, 'csf_tissue_prior_path')
    # wf_tissue_priors.connect(threshold_gm, 'out_file', outputspec, 'gm_tissue_prior_path')
    wf_tissue_priors.connect(threshold_wm, 'out_file', outputspec, 'wm_tissue_prior_path')

    return wf_tissue_priors
Exemple #12
0
def create_fatsegnet_workflow(
    subject_list,
    bids_dir,
    work_dir,
    out_dir,
    bids_templates,
    n4=False
):
    # create initial workflow
    wf = Workflow(name='workflow_fatsegnet', base_dir=work_dir)

    # use infosource to iterate workflow across subject list
    n_infosource = Node(
        interface=IdentityInterface(
            fields=['subject_id']
        ),
        name="subject_source"
        # input: 'subject_id'
        # output: 'subject_id'
    )
    # runs the node with subject_id = each element in subject_list
    n_infosource.iterables = ('subject_id', subject_list)

    # select matching files from bids_dir
    n_selectfiles = Node(
        interface=SelectFiles(
            templates=bids_templates,
            base_directory=bids_dir
        ),
        name='get_subject_data'
        # output: ['fat_composed', 'water_composed']
    )
    wf.connect([
        (n_infosource, n_selectfiles, [('subject_id', 'subject_id_p')])
    ])

    if n4:
        mn_n4_fat = Node(
            interface=N4BiasFieldCorrection(),
            iterfield=['input_image'],
            name='N4_fat',
            # output: 'output_image'
        )
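        # NOTE: 'iterfield' is a MapNode argument and appears to be a leftover on
        # these plain Nodes (same for N4_water and the *_scaled nodes below).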
        wf.connect([
            (n_selectfiles, mn_n4_fat, [('fat_composed', 'input_image')]),
        ])

        # https://nipype.readthedocs.io/en/latest/api/generated/nipype.interfaces.ants.html
        mn_n4_water = Node(
            interface=N4BiasFieldCorrection(),
            iterfield=['input_image'],
            name='N4_water',
            # output: 'output_image'
        )
        wf.connect([
            (n_selectfiles, mn_n4_water, [('water_composed', 'input_image')]),
        ])

    # scale data
    # or better: https://intensity-normalization.readthedocs.io/en/latest/utilities.html
    def scale(min_and_max):
        # min/max are extracted but currently unused; the op_string simply
        # multiplies the image by a fixed factor of 4
        min_value = min_and_max[0][0]
        max_value = min_and_max[0][1]
        fsl_cmd = "-mul %.10f " % 4

        return fsl_cmd

    mn_fat_stats = MapNode(
        # -R : <min intensity> <max intensity>
        interface=ImageStats(op_string='-R'),
        iterfield=['in_file'],
        name='get_stats_fat',
        # output: 'out_stat'
    )
    mn_water_stats = MapNode(
        # -R : <min intensity> <max intensity>
        interface=ImageStats(op_string='-R'),
        iterfield=['in_file'],
        name='get_stats_water',
        # output: 'out_stat'
    )

    if n4:
        wf.connect([
            (mn_n4_fat, mn_fat_stats, [('output_image', 'in_file')]),
            (mn_n4_water, mn_water_stats, [('output_image', 'in_file')])
        ])
    else:
        wf.connect([
            (n_selectfiles, mn_fat_stats, [('fat_composed', 'in_file')]),
            (n_selectfiles, mn_water_stats, [('water_composed', 'in_file')])
        ])
    
    mn_fat_scaled = Node(
        interface=ImageMaths(suffix="_scaled"),
        name='fat_scaled',
        iterfield=['in_file']
        # inputs: 'in_file', 'op_string'
        # output: 'out_file'
    )
    mn_water_scaled = Node(
        interface=ImageMaths(suffix="_scaled"),
        name='water_scaled',
        iterfield=['in_file']
        # inputs: 'in_file', 'op_string'
        # output: 'out_file'
    )
    if n4:
        wf.connect([
            (mn_n4_fat, mn_fat_scaled, [('output_image', 'in_file')]),
            (mn_n4_water, mn_water_scaled, [('output_image', 'in_file')]),
            (mn_fat_stats, mn_fat_scaled, [(('out_stat', scale), 'op_string')]),
            (mn_water_stats, mn_water_scaled, [(('out_stat', scale), 'op_string')])
        ])
    else:
        wf.connect([
            (n_selectfiles, mn_fat_scaled, [('fat_composed', 'in_file')]),
            (n_selectfiles, mn_water_scaled, [('water_composed', 'in_file')]),
            (mn_fat_stats, mn_fat_scaled, [(('out_stat', scale), 'op_string')]),
            (mn_water_stats, mn_water_scaled, [(('out_stat', scale), 'op_string')])
        ])

    # fatsegnet could work here when running on cluster, but in multiproc memory issues on GPU
    # mn_fatsegnet = MapNode(
    #         interface=fatsegnet.FatSegNetInterface(
    #             out_suffix='/afm02/Q2/Q2653/data/2021-01-18-fatsegnet-output/out_5'
    #         ),
    #         iterfield=['water_file', 'fat_file'],
    #         name='fatsegnet'
    #         # output: 'out_file'
    #     )

    # wf.connect([
    #         (mn_fat_scaled, mn_fatsegnet, [('out_file', 'fat_file')]),
    #         (mn_water_scaled, mn_fatsegnet, [('out_file', 'water_file')]),
    #     ])


    # datasink
    n_datasink_fat = Node(
        interface=DataSink(base_directory=bids_dir, 
            container=out_dir,
            parameterization=True, 
            substitutions=[('_subject_id_', '')],
            regexp_substitutions=[(r'sub-\w{5}_t1.*', 'FatImaging_F.nii.gz')]),
        name='datasink_fat'
    )    
    n_datasink_water = Node(
        interface=DataSink(base_directory=bids_dir, 
            container=out_dir,
            parameterization=True, 
            substitutions=[('_subject_id_', '')],
            regexp_substitutions=[(r'sub-\w{5}_t1.*', 'FatImaging_W.nii.gz')]),
        name='datasink_water'
    )
    # https://pythex.org/: search for sub-, then 5 numbers then _t1 and grab the rest

    wf.connect([
            (mn_fat_scaled, n_datasink_fat, [('out_file', 'preprocessed_mul4.@fat')]),
            (mn_water_scaled, n_datasink_water, [('out_file', 'preprocessed_mul4.@water')]),
        ])
    # https://nipype.readthedocs.io/en/0.11.0/users/grabbing_and_sinking.html
    # https://miykael.github.io/nipype_tutorial/notebooks/example_1stlevel.html
    # The period (.) indicates that a subfolder should be created. 
    # But if we wanted to store it in the same folder, 
    # we would use the .@ syntax. The @ tells the DataSink interface to not create the subfolder. 

    return wf
Exemple #13
0
    backg_fit = Node(LtzWaterMT(sequence=sequence, verbose=True),
                     name='backg_fit')
    # Simulate data for all frequencies
    sequence['MTSat']['sat_f0'] = zfrqs.tolist()
    sequence['MTSat']['sat_angle'] = np.repeat(180.0, len(zfrqs)).tolist()
    backg_sim = Node(LtzWaterMTSim(sequence=sequence,
                                   noise=0,
                                   out_file='backg_sim.nii.gz',
                                   verbose=True),
                     name='backg_sim')
    backg_sub = Node(ImageMaths(op_string='-sub',
                                out_file='no_backg_sub.nii.gz'),
                     name='backg_sub', iterfield=['in_file'])

    f0_indices = (np.abs(zfrqs) > 0.99) & (np.abs(zfrqs) < 10.1)
    sequence['MTSat']['sat_f0'] = zfrqs[f0_indices].tolist()
    sequence['MTSat']['sat_angle'] = np.repeat(
        180.0, f0_indices.sum()).tolist()  # one angle per retained frequency (len(f0_indices) counted the full boolean array)
    an_select = Node(Select(volumes=np.where(f0_indices)[0].tolist(), out_file='fg_zspec.nii.gz'),
                     name='an_select')
    an_fit = Node(LamideFit(sequence=sequence,
                            Zref=0.0,
                            additive=True,
                            verbose=True),
                  name='an_fit')  # name assumed; the original closing line was lost in a merge conflict
Exemple #14
0
datasink.inputs.container = output_dir

# DataSink Substitutions
substitutions = [('_subject_id', ''), ('_subject','subject'), # Substitutions have an order!!!
                  ('_in_matrix_file_', ''), # Turning a node into an iterable defaults to naming the output file after the input variable name
                  ('%s' % outpath_mat.replace('/','..'), ''), # When creating your own nodes, output files are (by default) named according to the absolute path; Nipype replaces '/' with '..' to avoid creating directories. It's ugly. This is my 'make-do' workaround.
                  ('.mat', ''),
                  ('_voxel_size_','vox_dims_'),
                  ('anat_flirt','anat_tform'), # Output files are (by default) named according to the input function or variable name. Each additional node appends '_functionname'. Be careful when creating your own nodes: Nipype gets confused, and files get overwritten or misrecognised in the 'processing' folder.
                  ('anat_resample','anat_rsmpl'),
                  ('_var_','var_')
                  ]
datasink.inputs.substitutions = substitutions

# Smooth
smooth = Node(ImageMaths(op_string='-fmean -s 2'),
                name="smooth")
smooth_spm = Node(Smooth(fwhm=smoothing_size),
                    name="smooth_spm")  # renamed from "smooth" to avoid a duplicate node name

# Resample
resample = Node(Resample(outputtype='NIFTI',
                          resample_mode='Li'), 
                  name="resample")
resample.iterables = ('voxel_size', vox_lst(min_dim,max_dim,step_dim)) # Example of turning a node (function) into an iterable, e.g. nodename.iterables = ('parameter',list_of_iterables). Depending on parameter, list may need to be tuple, etc.

# Noise
noise = Node(interface=Function(input_names=['base_dir','output_dir','in_file','var'], # Self-created node. Needs improvement.
                                output_names=['out_file'], 
                                function=snr_img), 
              name="noise")
def group_onesample_openfmri(dataset_dir,
                             model_id=None,
                             task_id=None,
                             l1output_dir=None,
                             out_dir=None,
                             no_reversal=False):

    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)  # NOTE: 'work_dir' is not a parameter here; it is assumed to be defined at module level

    info = Node(
        util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir']),
        name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']),
              name='grabber')
    dg.inputs.template = os.path.join(
        l1output_dir, 'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [[
        'model_id', 'task_id', '', '', 'cope_id'
    ]]
    dg.inputs.template_args['varcopes'] = [[
        'model_id', 'task_id', 'var', 'var', 'cope_id'
    ]]
    dg.iterables = ('cope_id', num_copes)

    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')

    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')

    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True

    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker,
                   'stats.@neg_localmax')

    return wk
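
# Hypothetical usage (all paths are placeholders):
#     wk = group_onesample_openfmri('/data/ds000114', model_id=1, task_id=1,
#                                   l1output_dir='/data/l1output', out_dir='/data/group')
#     wk.run('MultiProc', plugin_args={'n_procs': 4})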
Exemple #16
0
    os.chdir(experiment_dir + '/' + sub)
    #location/directory containing all the data for this study/subject
    ae = amico.Evaluation(experiment_dir, sub)

    #load data
    if amico_model == "NODDI":
        mask_file = "data_brain_mask.nii.gz"
        # generate scheme file from the bvals/bvecs
        amico.util.fsl2scheme('data.bval', 'data.bvec')
        amico_dir = opj(experiment_dir, sub, amico_model)
        if not os.path.exists(amico_dir):
            os.mkdir(amico_dir)
        os.rename('data.scheme', amico_dir + '/noddi.scheme')

    if amico_model == "CylinderZeppelinBall":
        threshL = ImageMaths(op_string='-thr 2 -uthr 2')
        threshL.inputs.in_file = experiment_dir + sub + '/aseg_trans.nii.gz'
        threshL.inputs.out_file = experiment_dir + sub + '/wmL_mask.nii.gz'
        threshL.run()
        threshR = ImageMaths(op_string='-thr 41 -uthr 41')
        threshR.inputs.in_file = experiment_dir + sub + '/aseg_trans.nii.gz'
        threshR.inputs.out_file = experiment_dir + sub + '/wmR_mask.nii.gz'
        threshR.run()
        add_string = '-add ' + experiment_dir + sub + '/wmR_mask.nii.gz'
        wm = ImageMaths(op_string=add_string)
        wm.inputs.in_file = experiment_dir + sub + '/wmL_mask.nii.gz'
        wm.inputs.out_file = experiment_dir + sub + '/wm_mask.nii.gz'
        wm.run()
        mask_file = "wm_mask.nii.gz"
        os.remove('wmR_mask.nii.gz')
        os.remove('wmL_mask.nii.gz')
binarizeDilatedCompleteMask = Node(Binarize(min=0.1, binary_file='dilated_complete_mask.nii'),
                                   name='binarizeDilatedCompleteMask')

applyVolTrans_cortical_mask = Node(ApplyVolTransform(reg_header=True,
                                                     interp='nearest'),
                                   name='applyVolTrans_cortical_mask')

applyVolTrans_brain_mask = Node(ApplyVolTransform(reg_header=True,
                                                  interp='nearest'),
                                name='applyVolTrans_brain_mask')

applyVolTrans_complete_mask = Node(ApplyVolTransform(reg_header=True,
                                                     interp='nearest'),
                                   name='applyVolTrans_complete_mask')

aparc_robust_BET_mask = MapNode(interface=ImageMaths(suffix='_ribbon_robust_BET',
                                                     op_string='-mas',
                                                     output_type='NIFTI',
                                                     out_file='aparc_robust_BET.nii'),
                                iterfield=['in_file'],
                                name='aparc_robust_BET_mask')

# Create the mask pipeline
preproc_ALPACA_maskflow = Workflow(name='preproc_ALPACA_maskflow')

# Connect all components of the preprocessing workflow - aparc / ribbon mask 
preproc_ALPACA_maskflow.connect([(realign, meanfuncmask, [('mean_image', 'in_file')]), # create a skull stripped mask image from the mean functional
                 (binarizebrainmask, mriconvert_brain_mask, [('binary_file', 'in_file')]), # convert brainmask to nifti
                 (mriconvert_brain_mask, applyVolTrans_brain_mask, [('out_file', 'source_file')]), # transform brainmask to functional space
                 (realign, applyVolTrans_brain_mask, [('mean_image', 'target_file')]),
                 (applyVolTrans_brain_mask, dilate_brain_mask, [('transformed_file', 'in_file')]), # dilate transformed brainmask
                 (dilate_brain_mask, binarizeDilatedBrainMask, [('binary_file', 'in_file')]), # binarize dilated and transformed brainmask
                 (binarizeCortical, wholebrainmask, [('binary_file','in_file')]),
                 (binarizeSubcortical, wholebrainmask, [('binary_file', 'operand_file')]), # combine aparc_aseg and ribbon files to get a whole brain mask
Exemple #18
0
def canonical(subjects_participants, regdir, f2s,
	template = "~/GitHub/mriPipeline/templates/waxholm/WHS_SD_rat_T2star_v1.01_downsample3.nii.gz",
	f_file_format = "~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_SE_EPI/f_bru2nii/",
	s_file_format = "~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_T2_TurboRARE/s_bru2nii/",
	):

	"""Warp a functional image based on the functional-to-structural and the structural-to-template registrations.
	Currently this approach is failing because the functional-to-structural registration pushes the brain stem too far down.
	This may be

	"""
	template = os.path.expanduser(template)
	for subject_participant in subjects_participants:
		# the 'subject' and 'session' keys implied by f_file_format/s_file_format
		# are also used to build the output file names below
		participant = subject_participant['subject']
		i = subject_participant['session']
		func_image_dir = os.path.expanduser(f_file_format.format(**subject_participant))
		struct_image_dir = os.path.expanduser(s_file_format.format(**subject_participant))
		try:
			for myfile in os.listdir(func_image_dir):
				if myfile.endswith((".nii.gz", ".nii")):
					func_image = os.path.join(func_image_dir,myfile)
			for myfile in os.listdir(struct_image_dir):
				if myfile.endswith((".nii.gz", ".nii")):
					struct_image = os.path.join(struct_image_dir,myfile)
		except FileNotFoundError:
			pass
		else:
			#struct
			n4 = ants.N4BiasFieldCorrection()
			n4.inputs.dimension = 3
			n4.inputs.input_image = struct_image
			# a correction bias is introduced (along the z-axis) if the following value is set to under 85; this is likely contingent on resolution.
			n4.inputs.bspline_fitting_distance = 100
			n4.inputs.shrink_factor = 2
			n4.inputs.n_iterations = [200,200,200,200]
			n4.inputs.convergence_threshold = 1e-11
			n4.inputs.output_image = '{}/ss_n4_{}_ofM{}.nii.gz'.format(regdir,participant,i)
			n4_res = n4.run()

			_n4 = ants.N4BiasFieldCorrection()
			_n4.inputs.dimension = 3
			_n4.inputs.input_image = struct_image
			# a correction bias is introduced (along the z-axis) if the following value is set to under 85; this is likely contingent on resolution.
			_n4.inputs.bspline_fitting_distance = 95
			_n4.inputs.shrink_factor = 2
			_n4.inputs.n_iterations = [500,500,500,500]
			_n4.inputs.convergence_threshold = 1e-14
			_n4.inputs.output_image = '{}/ss__n4_{}_ofM{}.nii.gz'.format(regdir,participant,i)
			_n4_res = _n4.run()

			# the cutoff is applied to a separate bias-corrected image (which we have to create for this purpose) to remove hyperintensities and prevent brain regions being caught by the negative threshold
			struct_cutoff = ImageMaths()
			struct_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"
			struct_cutoff.inputs.in_file = _n4_res.outputs.output_image
			struct_cutoff_res = struct_cutoff.run()

			struct_BET = BET()
			struct_BET.inputs.mask = True
			struct_BET.inputs.frac = 0.3
			struct_BET.inputs.robust = True
			struct_BET.inputs.in_file = struct_cutoff_res.outputs.out_file
			struct_BET_res = struct_BET.run()

			struct_mask = ApplyMask()
			struct_mask.inputs.in_file = n4_res.outputs.output_image
			struct_mask.inputs.mask_file = struct_BET_res.outputs.mask_file
			struct_mask_res = struct_mask.run()

			struct_registration = ants.Registration()
			struct_registration.inputs.fixed_image = template
			struct_registration.inputs.output_transform_prefix = "output_"
			struct_registration.inputs.transforms = ['Affine', 'SyN'] ##
			struct_registration.inputs.transform_parameters = [(1.0,), (1.0, 3.0, 5.0)] ##
			struct_registration.inputs.number_of_iterations = [[2000, 1000, 500], [100, 100, 100]] #
			struct_registration.inputs.dimension = 3
			struct_registration.inputs.write_composite_transform = True
			struct_registration.inputs.collapse_output_transforms = True
			struct_registration.inputs.initial_moving_transform_com = True
			# Tested on Affine transform: CC takes too long; Demons does not tilt, but moves the slices too far caudally; GC tilts too much on
			struct_registration.inputs.metric = ['MeanSquares', 'Mattes']
			struct_registration.inputs.metric_weight = [1, 1]
			struct_registration.inputs.radius_or_number_of_bins = [16, 32] #
			struct_registration.inputs.sampling_strategy = ['Random', None]
			struct_registration.inputs.sampling_percentage = [0.3, 0.3]
			struct_registration.inputs.convergence_threshold = [1.e-11, 1.e-8] #
			struct_registration.inputs.convergence_window_size = [20, 20]
			struct_registration.inputs.smoothing_sigmas = [[4, 2, 1], [4, 2, 1]]
			struct_registration.inputs.sigma_units = ['vox', 'vox']
			struct_registration.inputs.shrink_factors = [[3, 2, 1],[3, 2, 1]]
			struct_registration.inputs.use_estimate_learning_rate_once = [True, True]
			# if the fixed_image is not acquired similarly to the moving_image (e.g. RARE to histological (e.g. AMBMC)) this should be False
			struct_registration.inputs.use_histogram_matching = [False, False]
			struct_registration.inputs.winsorize_lower_quantile = 0.005
			struct_registration.inputs.winsorize_upper_quantile = 0.98
			struct_registration.inputs.args = '--float'
			struct_registration.inputs.num_threads = 6

			struct_registration.inputs.moving_image = struct_mask_res.outputs.out_file
			struct_registration.inputs.output_warped_image = '{}/s_{}_ofM{}.nii.gz'.format(regdir,participant,i)
			struct_registration_res = struct_registration.run()

			#func
			func_n4 = ants.N4BiasFieldCorrection()
			func_n4.inputs.dimension = 3
			func_n4.inputs.input_image = func_image
			func_n4.inputs.bspline_fitting_distance = 100
			func_n4.inputs.shrink_factor = 2
			func_n4.inputs.n_iterations = [200,200,200,200]
			func_n4.inputs.convergence_threshold = 1e-11
			func_n4.inputs.output_image = '{}/f_n4_{}_ofM{}.nii.gz'.format(regdir,participant,i)
			func_n4_res = func_n4.run()

			func_registration = ants.Registration()
			func_registration.inputs.fixed_image = n4_res.outputs.output_image
			func_registration.inputs.output_transform_prefix = "func_"
			func_registration.inputs.transforms = [f2s]
			func_registration.inputs.transform_parameters = [(0.1,)]
			func_registration.inputs.number_of_iterations = [[40, 20, 10]]
			func_registration.inputs.dimension = 3
			func_registration.inputs.write_composite_transform = True
			func_registration.inputs.collapse_output_transforms = True
			func_registration.inputs.initial_moving_transform_com = True
			func_registration.inputs.metric = ['MeanSquares']
			func_registration.inputs.metric_weight = [1]
			func_registration.inputs.radius_or_number_of_bins = [16]
			func_registration.inputs.sampling_strategy = ["Regular"]
			func_registration.inputs.sampling_percentage = [0.3]
			func_registration.inputs.convergence_threshold = [1.e-2]
			func_registration.inputs.convergence_window_size = [8]
			func_registration.inputs.smoothing_sigmas = [[4, 2, 1]] # [1,0.5,0]
			func_registration.inputs.sigma_units = ['vox']
			func_registration.inputs.shrink_factors = [[3, 2, 1]]
			func_registration.inputs.use_estimate_learning_rate_once = [True]
			func_registration.inputs.use_histogram_matching = [False]
			func_registration.inputs.winsorize_lower_quantile = 0.005
			func_registration.inputs.winsorize_upper_quantile = 0.995
			func_registration.inputs.args = '--float'
			func_registration.inputs.num_threads = 6

			func_registration.inputs.moving_image = func_n4_res.outputs.output_image
			func_registration.inputs.output_warped_image = '{}/f_{}_ofM{}.nii.gz'.format(regdir,participant,i)
			func_registration_res = func_registration.run()

			warp = ants.ApplyTransforms()
			warp.inputs.reference_image = template
			warp.inputs.input_image_type = 3
			warp.inputs.interpolation = 'Linear'
			warp.inputs.invert_transform_flags = [False, False]
			warp.inputs.terminal_output = 'file'
			warp.inputs.output_image = '{}/{}_ofM{}.nii.gz'.format(regdir,participant,i)
			warp.num_threads = 6

			warp.inputs.input_image = func_image
			warp.inputs.transforms = [func_registration_res.outputs.composite_transform, struct_registration_res.outputs.composite_transform]
			warp.run()
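A minimal invocation sketch for the function above. The 'subject' and 'session' keys are implied by the f_file_format/s_file_format placeholders, and the IDs and registration directory shown here are hypothetical; f2s selects the ANTs transform used for the functional-to-structural stage, for which 'Rigid', 'Affine' and 'SyN' are typical choices.

subjects_participants = [{'subject': 4001, 'session': 'ofM'}]  # hypothetical IDs
canonical(subjects_participants, regdir='/tmp/reg', f2s='SyN')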
Exemple #19
0
# <editor-fold desc="Scale phase data">
stats = MapNode(ImageStats(op_string='-R'),
                name='stats_node',
                iterfield=['in_file'])


def scale_to_pi(min_and_max):
    data_min = min_and_max[0][0]
    data_max = min_and_max[0][1]
    # TODO: Test at 3T with -4096 to + 4096 range
    # map intensities from [data_min, data_max] onto [-pi, pi]
    return '-sub %.10f -div %.10f -mul 6.28318530718 -sub 3.14159265359' % (
        data_min, data_max - data_min)
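# Sanity check with hypothetical values: an ImageStats '-R' result of
# [[0.0, 4094.0]] yields the op_string
# '-sub 0.0000000000 -div 4094.0000000000 -mul 6.28318530718 -sub 3.14159265359',
# i.e. raw intensities in [0, 4094] are mapped onto [-pi, pi].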


phs_range_n = MapNode(ImageMaths(),
                      name='phs_range_node',
                      iterfield=['in_file'])

wf.connect([(selectfiles, stats, [('phs', 'in_file')]),
            (selectfiles, phs_range_n, [('phs', 'in_file')]),
            (stats, phs_range_n, [(('out_stat', scale_to_pi), 'op_string')])])
# </editor-fold>

# <editor-fold desc="Read echotime and fieldstrenghts from json files ">


def read_json(in_file):
    import os
    te = 0.001
    b0 = 7
Exemple #20
0
def structural_per_participant_test(
    participant,
    conditions=["", "_aF", "_cF1", "_cF2", "_pF"],
    template="/home/chymera/ni_data/templates/ds_QBI_chr.nii.gz",
):

    for i in conditions:
        image_dir = "/home/chymera/ni_data/ofM.dr/preprocessing/generic_work/_subject_session_{}.ofM{}/_scan_type_T2_TurboRARE/s_bru2nii/".format(
            participant, i)
        print(image_dir)
        try:
            for myfile in os.listdir(image_dir):
                if myfile.endswith(".nii"):
                    mimage = os.path.join(image_dir, myfile)
        except FileNotFoundError:
            pass
        else:
            n4 = ants.N4BiasFieldCorrection()
            n4.inputs.dimension = 3
            n4.inputs.input_image = mimage
            # a correction bias is introduced (along the z-axis) if the following value is set to under 85; this is likely contingent on resolution.
            n4.inputs.bspline_fitting_distance = 100
            n4.inputs.shrink_factor = 2
            n4.inputs.n_iterations = [200, 200, 200, 200]
            n4.inputs.convergence_threshold = 1e-11
            n4.inputs.output_image = 'ss_n4_{}_ofM{}.nii.gz'.format(
                participant, i)
            n4_res = n4.run()

            _n4 = ants.N4BiasFieldCorrection()
            _n4.inputs.dimension = 3
            _n4.inputs.input_image = mimage
            # a correction bias is introduced (along the z-axis) if the following value is set to under 85; this is likely contingent on resolution.
            _n4.inputs.bspline_fitting_distance = 95
            _n4.inputs.shrink_factor = 2
            _n4.inputs.n_iterations = [500, 500, 500, 500]
            _n4.inputs.convergence_threshold = 1e-14
            _n4.inputs.output_image = 'ss__n4_{}_ofM{}.nii.gz'.format(
                participant, i)
            _n4_res = _n4.run()

            # the cutoff is applied to a separate bias-corrected image (which we have to create for this purpose) to remove hyperintensities and prevent brain regions being caught by the negative threshold
            struct_cutoff = ImageMaths()
            struct_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"
            struct_cutoff.inputs.in_file = _n4_res.outputs.output_image
            struct_cutoff_res = struct_cutoff.run()

            struct_BET = BET()
            struct_BET.inputs.mask = True
            struct_BET.inputs.frac = 0.3
            struct_BET.inputs.robust = True
            struct_BET.inputs.in_file = struct_cutoff_res.outputs.out_file
            struct_BET_res = struct_BET.run()

            mask = ApplyMask()
            mask.inputs.in_file = n4_res.outputs.output_image
            mask.inputs.mask_file = struct_BET_res.outputs.mask_file
            mask_res = mask.run()

            struct_registration = ants.Registration()
            struct_registration.inputs.fixed_image = template
            struct_registration.inputs.output_transform_prefix = "output_"
            struct_registration.inputs.transforms = ['Rigid', 'Affine',
                                                     'SyN']  ##
            struct_registration.inputs.transform_parameters = [(.1, ), (1.0, ),
                                                               (1.0, 3.0, 5.0)
                                                               ]  ##
            struct_registration.inputs.number_of_iterations = [[
                150, 100, 50
            ], [2000, 1000, 500], [100, 100, 100]]  #
            struct_registration.inputs.dimension = 3
            struct_registration.inputs.write_composite_transform = True
            struct_registration.inputs.collapse_output_transforms = True
            struct_registration.inputs.initial_moving_transform_com = True
            # Tested on Affine transform: CC takes too long; Demons does not tilt, but moves the slices too far caudally; GC tilts too much on
            struct_registration.inputs.metric = [
                'MeanSquares', 'MeanSquares', 'Mattes'
            ]
            struct_registration.inputs.metric_weight = [1, 1, 1]
            struct_registration.inputs.radius_or_number_of_bins = [16, 16,
                                                                   32]  #
            struct_registration.inputs.sampling_strategy = [
                'Random', 'Random', None
            ]
            struct_registration.inputs.sampling_percentage = [0.3, 0.3, 0.3]
            struct_registration.inputs.convergence_threshold = [
                1.e-10, 1.e-11, 1.e-8
            ]  #
            struct_registration.inputs.convergence_window_size = [20, 20, 20]
            struct_registration.inputs.smoothing_sigmas = [[4, 2,
                                                            1], [4, 2, 1],
                                                           [4, 2, 1]]
            struct_registration.inputs.sigma_units = ['vox', 'vox', 'vox']
            struct_registration.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1],
                                                         [3, 2, 1]]
            struct_registration.inputs.use_estimate_learning_rate_once = [
                True, True, True
            ]
            # if the fixed_image is not acquired similarly to the moving_image (e.g. RARE to histological (e.g. AMBMC)) this should be False
            struct_registration.inputs.use_histogram_matching = [
                False, False, False
            ]
            struct_registration.inputs.winsorize_lower_quantile = 0.005
            struct_registration.inputs.winsorize_upper_quantile = 0.98
            struct_registration.inputs.args = '--float'
            struct_registration.inputs.num_threads = 6

            struct_registration.inputs.moving_image = mask_res.outputs.out_file
            struct_registration.inputs.output_warped_image = 'ss_{}_ofM{}.nii.gz'.format(
                participant, i)
            res = struct_registration.run()
Exemple #21
0
####################
## ants_brain_ext ##
####################
ants_be_n = MapNode(BrainExtraction(dimension=3, brain_template='/data/fasttemp/uqtshaw/tomcat/data/derivatives/myelin_mapping/T_template.nii.gz', brain_probability_mask='/data/fasttemp/uqtshaw/tomcat/data/derivatives/myelin_mapping/T_template_BrainCerebellumProbabilityMask.nii.gz'),
		name='ants_be_node', iterfield=['anatomical_image'])
wf.connect([(selectfiles, ants_be_n, [('t1w', 'anatomical_image')])]) 
############
## antsCT ##
############
antsct_n = MapNode(CorticalThickness(dimension=3, brain_template='/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/T_template.nii.gz', brain_probability_mask='/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/T_template_BrainCerebellumProbabilityMask.nii.gz', segmentation_priors=['/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/Priors/priors1.nii.gz', '/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/Priors/priors2.nii.gz', '/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/Priors/priors3.nii.gz', '/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/Priors/priors4.nii.gz', '/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/Priors/priors5.nii.gz'], t1_registration_template='/data/fastertemp/uqtshaw/ANTS/ants_ct_templates/T_template_BrainCerebellum.nii.gz'),
              name='ants_node', iterfield=['anatomical_image'])
wf.connect([(selectfiles, antsct_n, [('t1w', 'anatomical_image')])]) 
###############
## mult_mask ##
###############
mult_mask_n_flair = MapNode(ImageMaths(op_string='-mul'),
                      name="mult_mask_flair", iterfield=['in_file', 'in_file2'])
wf.connect([(ants_be_n, mult_mask_n_flair, [('BrainExtractionMask', 'in_file')])])
wf.connect([(flirt_n_flair, mult_mask_n_flair, [('out_file', 'in_file2')])])
mult_mask_n_space = MapNode(ImageMaths(op_string='-mul'),
                      name="mult_mask_space", iterfield=['in_file', 'in_file2'])
wf.connect([(ants_be_n, mult_mask_n_space, [('BrainExtractionMask', 'in_file')])])
wf.connect([(flirt_n_space, mult_mask_n_space, [('out_file', 'in_file2')])])
###############
## N4 the T2 ##
###############
n4_n_flair = MapNode(N4BiasFieldCorrection(dimension=3, bspline_fitting_distance=300, shrink_factor=3, n_iterations=[50,50,30,20]),
                        name="n4_flair", iterfield=['input_image'])
wf.connect([(mult_mask_n_flair, n4_n_flair, [('out_file', 'input_image')])])
n4_n_space = MapNode(N4BiasFieldCorrection(dimension=3, bspline_fitting_distance=300, shrink_factor=3, n_iterations=[50,50,30,20]),
                        name="n4_space", iterfield=['input_image'])
Exemple #22
0
def amide_noe(zfrqs, name='Amide_NOE'):
    inputnode = Node(IdentityInterface(fields=['zspec_file', 'mask_file']),
                     name='inputnode')
    outputnode = Node(
        IdentityInterface(fields=['diff_file', 'DS', 'MT', 'Amide', 'NOE']),
        name='outputnode')
    # Repeat 2-pool fit
    f0_indices = (np.abs(zfrqs) > 9.9) | (np.abs(zfrqs) < 1.1)
    sequence = {
        'MTSat': {
            'pulse': {
                'p1': 0.4,
                'p2': 0.3,
                'bandwidth': 0.39
            },
            'Trf': 0.02,
            'TR': 4,
            'FA': 5,
            'sat_f0': zfrqs[f0_indices].tolist(),
            'sat_angle': np.repeat(180.0, f0_indices.sum()).tolist()
        }
    }
    two_pools = [{
        'name': 'DS',
        'df0': [0, -2.5, 2.5],
        'fwhm': [1.0, 1.e-6, 3.0],
        'A': [0.2, 1.e-3, 1.0],
        'use_bandwidth': True
    }, {
        'name': 'MT',
        'df0': [-2.5, -5.0, -0.5],
        'fwhm': [50.0, 35.0, 200.0],
        'A': [0.3, 1.e-3, 1.0]
    }]
    backg_select = Node(Select(volumes=np.where(f0_indices)[0].tolist(),
                               out_file='bg_zspec.nii.gz'),
                        name='backg_select')
    backg_fit = Node(Lorentzian(sequence=sequence,
                                pools=two_pools,
                                verbose=True),
                     name='backg_fit')
    # Simulate data for all frequencies
    sequence['MTSat']['sat_f0'] = zfrqs.tolist()
    sequence['MTSat']['sat_angle'] = np.repeat(180.0, len(zfrqs)).tolist()
    backg_sim = Node(LorentzianSim(sequence=sequence,
                                   pools=two_pools,
                                   noise=0,
                                   in_file='backg_sim.nii.gz',
                                   verbose=True),
                     name='backg_sim')
    backg_sub = Node(ImageMaths(op_string='-sub',
                                out_file='no_backg_sub.nii.gz'),
                     name='backg_sub')

    an_pools = [{
        'name': 'Amide',
        'df0': [3.5, 2.0, 6.0],
        'fwhm': [2.0, 0.4, 4.0],
        'A': [0.2, 1.e-3, 0.2],
        'use_bandwidth': True
    }, {
        'name': 'NOE',
        'df0': [-4.0, -6.0, -2.0],
        'fwhm': [2.0, 0.4, 4.0],
        'A': [0.2, 1.e-3, 0.2],
        'use_bandwidth': True
    }]
    f0_indices = (np.abs(zfrqs) > 0.99) & (np.abs(zfrqs) < 10.1)
    sequence['MTSat']['sat_f0'] = zfrqs[f0_indices].tolist()
    sequence['MTSat']['sat_angle'] = np.repeat(180.0, f0_indices.sum()).tolist()
    an_select = Node(Select(volumes=np.where(f0_indices)[0].tolist(),
                            out_file='fg_zspec.nii.gz'),
                     name='an_select')

    an_fit = Node(Lorentzian(sequence=sequence,
                             pools=an_pools,
                             Zref=0.0,
                             additive=True,
                             verbose=True),
                  name='an_pool')

    wf = Workflow(name=name)
    wf.connect([
        (inputnode, backg_select, [('zspec_file', 'in_file')]),
        (backg_select, backg_fit, [('out_file', 'in_file')]),
        (inputnode, backg_fit, [('mask_file', 'mask_file')]),
        (backg_fit, backg_sim, [('DS_f0', 'DS_f0'), ('DS_fwhm', 'DS_fwhm'),
                                ('DS_A', 'DS_A'), ('MT_f0', 'MT_f0'),
                                ('MT_fwhm', 'MT_fwhm'), ('MT_A', 'MT_A')]),
        (inputnode, backg_sub, [('zspec_file', 'in_file2')]),
        (backg_sim, backg_sub, [('out_file', 'in_file')]),
        (backg_sub, an_select, [('out_file', 'in_file')]),
        (an_select, an_fit, [('out_file', 'in_file')]),
        (inputnode, an_fit, [('mask_file', 'mask_file')]),
        (backg_sub, outputnode, [('out_file', 'diff_file')]),
        (backg_fit, outputnode, [('DS_A', 'DS'), ('MT_A', 'MT')]),
        (an_fit, outputnode, [('Amide_A', 'Amide'), ('NOE_A', 'NOE')])
    ])
    return wf
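A minimal usage sketch, assuming the saturation offsets span the ±10 ppm range that the fit indices above rely on; the file names are hypothetical.

import numpy as np

zfrqs = np.linspace(-10.0, 10.0, 51)  # assumed saturation offsets in ppm
wf = amide_noe(zfrqs)
wf.inputs.inputnode.zspec_file = 'zspec.nii.gz'
wf.inputs.inputnode.mask_file = 'mask.nii.gz'
wf.run()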
Exemple #23
0
def structural_to_functional_per_participant_test(
    subjects_sessions,
    template="~/GitHub/mriPipeline/templates/waxholm/new/WHS_SD_masked.nii.gz",
    f_file_format="~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_SE_EPI/f_bru2nii/",
    s_file_format="~/GitHub/mripipeline/base/preprocessing/generic_work/_subject_session_{subject}.{session}/_scan_type_T2_TurboRARE/s_bru2nii/",
    num_threads=3,
):

    template = os.path.expanduser(template)
    for subject_session in subjects_sessions:
        func_image_dir = os.path.expanduser(
            f_file_format.format(**subject_session))
        struct_image_dir = os.path.expanduser(
            s_file_format.format(**subject_session))
        try:
            for myfile in os.listdir(func_image_dir):
                if myfile.endswith((".nii.gz", ".nii")):
                    func_image = os.path.join(func_image_dir, myfile)
            for myfile in os.listdir(struct_image_dir):
                if myfile.endswith((".nii.gz", ".nii")):
                    struct_image = os.path.join(struct_image_dir, myfile)
        except FileNotFoundError:
            pass
        else:
            n4 = ants.N4BiasFieldCorrection()
            n4.inputs.dimension = 3
            n4.inputs.input_image = struct_image
            # a correction bias is introduced (along the z-axis) if the following value is set to under 85; this is likely contingent on resolution.
            n4.inputs.bspline_fitting_distance = 100
            n4.inputs.shrink_factor = 2
            n4.inputs.n_iterations = [200, 200, 200, 200]
            n4.inputs.convergence_threshold = 1e-11
            n4.inputs.output_image = '{}_{}_1_biasCorrection_forRegistration.nii.gz'.format(
                *subject_session.values())
            n4_res = n4.run()

            _n4 = ants.N4BiasFieldCorrection()
            _n4.inputs.dimension = 3
            _n4.inputs.input_image = struct_image
            # a correction bias is introduced (along the z-axis) if the following value is set to under 85; this is likely contingent on resolution.
            _n4.inputs.bspline_fitting_distance = 95
            _n4.inputs.shrink_factor = 2
            _n4.inputs.n_iterations = [500, 500, 500, 500]
            _n4.inputs.convergence_threshold = 1e-14
            _n4.inputs.output_image = '{}_{}_1_biasCorrection_forMasking.nii.gz'.format(
                *subject_session.values())
            _n4_res = _n4.run()

            # the cutoff is applied to a separate bias-corrected image (which we have to create for this purpose) to remove hyperintensities and prevent brain regions being caught by the negative threshold
            struct_cutoff = ImageMaths()
            struct_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"
            struct_cutoff.inputs.in_file = _n4_res.outputs.output_image
            struct_cutoff_res = struct_cutoff.run()

            struct_BET = BET()
            struct_BET.inputs.mask = True
            struct_BET.inputs.frac = 0.3
            struct_BET.inputs.robust = True
            struct_BET.inputs.in_file = struct_cutoff_res.outputs.out_file
            struct_BET.inputs.out_file = '{}_{}_2_brainExtraction.nii.gz'.format(
                *subject_session.values())
            struct_BET_res = struct_BET.run()

            # we need/can not apply a fill, because the "holes" if any, will be at the rostral edge (touching it, and thus not counting as holes)
            struct_mask = ApplyMask()
            struct_mask.inputs.in_file = n4_res.outputs.output_image
            struct_mask.inputs.mask_file = struct_BET_res.outputs.mask_file
            struct_mask.inputs.out_file = '{}_{}_3_brainMasked.nii.gz'.format(
                *subject_session.values())
            struct_mask_res = struct_mask.run()

            struct_registration = ants.Registration()
            struct_registration.inputs.fixed_image = template
            struct_registration.inputs.output_transform_prefix = "output_"
            struct_registration.inputs.transforms = ['Affine', 'SyN']  ##
            struct_registration.inputs.transform_parameters = [(1.0, ),
                                                               (1.0, 3.0, 5.0)
                                                               ]  ##
            struct_registration.inputs.number_of_iterations = [[
                2000, 1000, 500
            ], [100, 100, 100]]  #
            struct_registration.inputs.dimension = 3
            struct_registration.inputs.write_composite_transform = True
            struct_registration.inputs.collapse_output_transforms = True
            struct_registration.inputs.initial_moving_transform_com = True
            # Tested on Affine transform: CC takes too long; Demons does not tilt, but moves the slices too far caudally; GC tilts too much on
            struct_registration.inputs.metric = ['MeanSquares', 'Mattes']
            struct_registration.inputs.metric_weight = [1, 1]
            struct_registration.inputs.radius_or_number_of_bins = [16, 32]  #
            struct_registration.inputs.sampling_strategy = ['Random', None]
            struct_registration.inputs.sampling_percentage = [0.3, 0.3]
            struct_registration.inputs.convergence_threshold = [1.e-11,
                                                                1.e-8]  #
            struct_registration.inputs.convergence_window_size = [20, 20]
            struct_registration.inputs.smoothing_sigmas = [[4, 2, 1],
                                                           [4, 2, 1]]
            struct_registration.inputs.sigma_units = ['vox', 'vox']
            struct_registration.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]]
            struct_registration.inputs.use_estimate_learning_rate_once = [
                True, True
            ]
            # if the fixed_image is not acquired similarly to the moving_image (e.g. RARE to histological (e.g. AMBMC)) this should be False
            struct_registration.inputs.use_histogram_matching = [False, False]
            struct_registration.inputs.winsorize_lower_quantile = 0.005
            struct_registration.inputs.winsorize_upper_quantile = 0.98
            struct_registration.inputs.args = '--float'
            struct_registration.inputs.num_threads = num_threads

            struct_registration.inputs.moving_image = struct_mask_res.outputs.out_file
            struct_registration.inputs.output_warped_image = '{}_{}_4_structuralRegistration.nii.gz'.format(
                *subject_session.values())
            struct_registration_res = struct_registration.run()

            warp = ants.ApplyTransforms()
            warp.inputs.reference_image = template
            warp.inputs.input_image_type = 3
            warp.inputs.interpolation = 'Linear'
            warp.inputs.invert_transform_flags = [False]
            warp.inputs.terminal_output = 'file'
            warp.inputs.output_image = '{}_{}_5_functionalWarp.nii.gz'.format(
                *subject_session.values())
            warp.num_threads = num_threads

            warp.inputs.input_image = func_image
            warp.inputs.transforms = struct_registration_res.outputs.composite_transform
            warp.run()
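A usage sketch under the same assumptions as above: each dictionary must supply the 'subject' and 'session' placeholders of f_file_format/s_file_format, and since the function unpacks *subject_session.values() into its output file names, the keys should be declared in that order (all IDs here are hypothetical).

subjects_sessions = [{'subject': 4001, 'session': 'ofM'},
                     {'subject': 4001, 'session': 'ofM_aF'}]
structural_to_functional_per_participant_test(subjects_sessions, num_threads=3)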
Exemple #24
0
def prep(zfrqs, dummies=0, pca_retain=0, name='CEST_prep'):
    inputnode = Node(IdentityInterface(fields=['zspec_file', 'ref_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(
        fields=['zspec_file', 'f0_map', 'mask_file', 'ref_file', 'DS', 'MT']),
                      name='outputnode')

    moco = Node(MCFLIRT(cost='mutualinfo', mean_vol=True), name='moco')
    mask = Node(BET(mask=True, no_output=True), name='mask')

    if (dummies > 0):
        ref_index = dummies - 1
        zspec_select = Node(Select(volumes=list(range(dummies, len(zfrqs))),
                                   out_file='zspec.nii.gz'),
                            name='zspec_select')
        zfrqs = np.array(zfrqs[dummies:])
    else:
        ref_index = 0
        zfrqs = np.array(zfrqs)

    zspec_ref = Node(Select(volumes=[
        ref_index,
    ], out_file='reference.nii.gz'),
                     name='zspec_ref')
    zspec_norm = Node(ImageMaths(op_string='-div', out_file='zspec.nii.gz'),
                      name='zspec_norm')

    f0_indices = (np.abs(zfrqs) > 7) | (np.abs(zfrqs) < 1.1)
    sat_frqs = zfrqs[f0_indices]
    sat_angles = np.repeat(180.0, f0_indices.sum())
    f0_select = Node(Select(volumes=np.where(f0_indices)[0].tolist(),
                            out_file='background_zspec.nii.gz'),
                     name='f0_select')
    sequence = {
        'MTSat': {
            'pulse': {
                'p1': 0.4,
                'p2': 0.3,
                'bandwidth': 0.39
            },
            'Trf': 0.02,
            'TR': 4,
            'FA': 5,
            'sat_f0': sat_frqs.tolist(),
            'sat_angle': sat_angles.tolist()
        }
    }
    two_pools = [{
        'name': 'DS',
        'df0': [0, -2.5, 2.5],
        'fwhm': [1.0, 1.e-6, 3.0],
        'A': [0.2, 1.e-3, 1.0],
        'use_bandwidth': True
    }, {
        'name': 'MT',
        'df0': [-2.5, -5.0, -0.5],
        'fwhm': [50.0, 35.0, 200.0],
        'A': [0.3, 1.e-3, 1.0]
    }]
    f0_fit = Node(Lorentzian(sequence=sequence, pools=two_pools, verbose=True),
                  name='f0_fit')

    out_frqs = np.sort(zfrqs)
    f0_correct = Node(ZSpec(in_freqs=zfrqs.tolist(),
                            out_freqs=out_frqs.tolist(),
                            verbose=True),
                      name='f0_correct')

    prep = Workflow(name=name)
    prep.connect([(inputnode, moco, [('zspec_file', 'in_file'),
                                     ('ref_file', 'ref_file')]),
                  (moco, zspec_ref, [('out_file', 'in_file')]),
                  (moco, mask, [('mean_img', 'in_file')]),
                  (zspec_ref, zspec_norm, [('out_file', 'in_file2')]),
                  (zspec_norm, f0_select, [('out_file', 'in_file')]),
                  (f0_select, f0_fit, [('out_file', 'in_file')]),
                  (mask, f0_fit, [('mask_file', 'mask_file')]),
                  (zspec_norm, f0_correct, [('out_file', 'in_file')]),
                  (f0_fit, f0_correct, [('DS_f0', 'f0_map')]),
                  (mask, f0_correct, [('mask_file', 'mask_file')]),
                  (moco, outputnode, [('mean_img', 'ref_file')]),
                  (mask, outputnode, [('mask_file', 'mask_file')]),
                  (f0_fit, outputnode, [('DS_f0', 'f0_map'), ('DS_A', 'DS'),
                                        ('MT_A', 'MT')])])
    if (dummies > 0):
        prep.connect([(moco, zspec_select, [('out_file', 'in_file')]),
                      (zspec_select, zspec_norm, [('out_file', 'in_file')])])
    else:
        prep.connect([(moco, zspec_norm, [('out_file', 'in_file')])])

    if pca_retain > 0:
        f0_pca = Node(PCA(retain=pca_retain, projections_file='proj.nii.gz'),
                      name='f0_pca')
        prep.connect([(f0_correct, f0_pca, [('out_file', 'in_file')]),
                      (f0_pca, outputnode, [('out_file', 'zspec_file')])])
    else:
        prep.connect([(f0_correct, outputnode, [('out_file', 'zspec_file')])])

    return (prep, out_frqs)
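A minimal sketch of running the preparation workflow above; the offset list and file names are hypothetical. Note that prep returns both the workflow and the sorted output frequencies.

import numpy as np

zfrqs = np.linspace(-10, 10, 53).tolist()  # assumed saturation offsets in ppm
prep_wf, out_frqs = prep(zfrqs, dummies=2)
prep_wf.inputs.inputnode.zspec_file = 'zspec_raw.nii.gz'
prep_wf.inputs.inputnode.ref_file = 'ref.nii.gz'
prep_wf.run()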
Exemple #25
0
def group_multregress_openfmri(dataset_dir,
                               model_id=None,
                               task_id=None,
                               l1output_dir=None,
                               out_dir=None,
                               no_reversal=False,
                               plugin=None,
                               plugin_args=None,
                               flamemodel='flame1',
                               nonparametric=False,
                               use_spm=False):

    meta_workflow = Workflow(name='mult_regress')
    meta_workflow.base_dir = work_dir
    for task in task_id:
        task_name = get_taskname(dataset_dir, task)
        cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir)
        regressors_needed, contrasts, groups, subj_list = get_sub_vars(
            dataset_dir, task_name, model_id)
        for idx, contrast in enumerate(contrasts):
            wk = Workflow(name='model_%03d_task_%03d_contrast_%s' %
                          (model_id, task, contrast[0][0]))

            info = Node(util.IdentityInterface(
                fields=['model_id', 'task_id', 'dataset_dir', 'subj_list']),
                        name='infosource')
            info.inputs.model_id = model_id
            info.inputs.task_id = task
            info.inputs.dataset_dir = dataset_dir

            dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                                  outfields=['copes', 'varcopes']),
                      name='grabber')
            dg.inputs.template = os.path.join(
                l1output_dir,
                'model%03d/task%03d/%s/%scopes/%smni/%scope%02d.nii%s')
            if use_spm:
                dg.inputs.template_args['copes'] = [[
                    'model_id', 'task_id', subj_list, '', 'spm/', '',
                    'cope_id', ''
                ]]
                dg.inputs.template_args['varcopes'] = [[
                    'model_id', 'task_id', subj_list, 'var', 'spm/', 'var',
                    'cope_id', '.gz'
                ]]
            else:
                dg.inputs.template_args['copes'] = [[
                    'model_id', 'task_id', subj_list, '', '', '', 'cope_id',
                    '.gz'
                ]]
                dg.inputs.template_args['varcopes'] = [[
                    'model_id', 'task_id', subj_list, 'var', '', 'var',
                    'cope_id', '.gz'
                ]]
            dg.iterables = ('cope_id', cope_ids)
            dg.inputs.sort_filelist = False

            wk.connect(info, 'model_id', dg, 'model_id')
            wk.connect(info, 'task_id', dg, 'task_id')

            model = Node(MultipleRegressDesign(), name='l2model')
            model.inputs.groups = groups
            model.inputs.contrasts = contrasts[idx]
            model.inputs.regressors = regressors_needed[idx]

            mergecopes = Node(Merge(dimension='t'), name='merge_copes')
            wk.connect(dg, 'copes', mergecopes, 'in_files')

            if flamemodel != 'ols':
                mergevarcopes = Node(Merge(dimension='t'),
                                     name='merge_varcopes')
                wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

            mask_file = fsl.Info.standard_image(
                'MNI152_T1_2mm_brain_mask.nii.gz')
            flame = Node(FLAMEO(), name='flameo')
            flame.inputs.mask_file = mask_file
            flame.inputs.run_mode = flamemodel
            #flame.inputs.infer_outliers = True

            wk.connect(model, 'design_mat', flame, 'design_file')
            wk.connect(model, 'design_con', flame, 't_con_file')
            wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
            if flamemodel != 'ols':
                wk.connect(mergevarcopes, 'merged_file', flame,
                           'var_cope_file')
            wk.connect(model, 'design_grp', flame, 'cov_split_file')

            if nonparametric:
                palm = Node(Function(input_names=[
                    'cope_file', 'design_file', 'contrast_file', 'group_file',
                    'mask_file', 'cluster_threshold'
                ],
                                     output_names=['palm_outputs'],
                                     function=run_palm),
                            name='palm')
                palm.inputs.cluster_threshold = 3.09
                palm.inputs.mask_file = mask_file
                palm.plugin_args = {
                    'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G',
                    'overwrite': True
                }
                wk.connect(model, 'design_mat', palm, 'design_file')
                wk.connect(model, 'design_con', palm, 'contrast_file')
                wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
                wk.connect(model, 'design_grp', palm, 'group_file')

            smoothest = Node(SmoothEstimate(), name='smooth_estimate')
            wk.connect(flame, 'zstats', smoothest, 'zstat_file')
            smoothest.inputs.mask_file = mask_file

            cluster = Node(Cluster(), name='cluster')
            wk.connect(smoothest, 'dlh', cluster, 'dlh')
            wk.connect(smoothest, 'volume', cluster, 'volume')
            cluster.inputs.connectivity = 26
            cluster.inputs.threshold = 2.3
            cluster.inputs.pthreshold = 0.05
            cluster.inputs.out_threshold_file = True
            cluster.inputs.out_index_file = True
            cluster.inputs.out_localmax_txt_file = True

            wk.connect(flame, 'zstats', cluster, 'in_file')

            ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                           name='z2pval')
            wk.connect(flame, 'zstats', ztopval, 'in_file')

            sinker = Node(DataSink(), name='sinker')
            sinker.inputs.base_directory = os.path.join(
                out_dir, 'task%03d' % task, contrast[0][0])
            sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                           ('_maths_', '_reversed_')]

            wk.connect(flame, 'zstats', sinker, 'stats')
            wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
            wk.connect(cluster, 'index_file', sinker, 'stats.@index')
            wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')
            if nonparametric:
                wk.connect(palm, 'palm_outputs', sinker, 'stats.palm')

            if not no_reversal:
                zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
                zstats_reverse.inputs.operation = 'mul'
                zstats_reverse.inputs.operand_value = -1
                wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

                cluster2 = cluster.clone(name='cluster2')
                wk.connect(smoothest, 'dlh', cluster2, 'dlh')
                wk.connect(smoothest, 'volume', cluster2, 'volume')
                wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

                ztopval2 = ztopval.clone(name='ztopval2')
                wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

                wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
                wk.connect(cluster2, 'threshold_file', sinker,
                           'stats.@neg_thr')
                wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
                wk.connect(cluster2, 'localmax_txt_file', sinker,
                           'stats.@neg_localmax')
            meta_workflow.add_nodes([wk])
    return meta_workflow
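A hedged invocation sketch: the dataset paths below are hypothetical, the helpers get_taskname, l1_contrasts_num, get_sub_vars and run_palm must be importable from the surrounding module, and work_dir has to exist as a module-level variable, since the function body refers to it without taking it as an argument.

work_dir = '/scratch/group_work'  # assumed module-level working directory
mwf = group_multregress_openfmri(dataset_dir='/data/ds000114',
                                 model_id=1,
                                 task_id=[1],
                                 l1output_dir='/data/ds000114/l1output',
                                 out_dir='/data/ds000114/group')
mwf.run(plugin='MultiProc')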
Exemple #26
0
def init_mpm_wf(me_params, mtsat_params):
    inputnode = Node(IdentityInterface(fields=[
        'PDw_file', 'T1w_file', 'MTw_file', 'PDw_cal', 'T1w_cal', 'MTw_cal',
        'B1_map'
    ]),
                     name='inputnode')
    outputnode = Node(
        IdentityInterface(fields=['PD_map', 'R1_map', 'R2s_map', 'mtsat_map']),
        name='outputnode')

    wf = Workflow(name='Multi-Parametric-Mapping')

    bet = Node(BET(mask=True, no_output=True), name='brain_mask')
    wf.connect([(inputnode, bet, [('T1w_file', 'in_file')])])

    PD_B1minus = Node(B1Minus(), name='PD_B1minus')
    t1_B1minus = Node(B1Minus(), name='t1_B1minus')
    mt_B1minus = Node(B1Minus(), name='mt_B1minus')
    wf.connect([(inputnode, PD_B1minus, [('PDw_cal', 'in_file')]),
                (inputnode, t1_B1minus, [('T1w_cal', 'in_file')]),
                (inputnode, mt_B1minus, [('MTw_cal', 'in_file')])])

    pd0 = Node(Select(volumes=[
        0,
    ], out_file='pd0.nii.gz'), name='pd0')
    t10 = Node(Select(volumes=[
        0,
    ], out_file='t10.nii.gz'), name='t10')
    mt0 = Node(Select(volumes=[
        0,
    ], out_file='mt0.nii.gz'), name='mt0')
    wf.connect([(inputnode, pd0, [('PDw_file', 'in_file')]),
                (inputnode, t10, [('T1w_file', 'in_file')]),
                (inputnode, mt0, [('MTw_file', 'in_file')])])

    PD_B1m_hires = Node(FLIRT(apply_xfm=True, uses_qform=True),
                        name='PD_B1m_hires')
    t1_B1m_hires = Node(FLIRT(apply_xfm=True, uses_qform=True),
                        name='t1_B1m_hires')
    mt_B1m_hires = Node(FLIRT(apply_xfm=True, uses_qform=True),
                        name='mt_B1m_hires')
    wf.connect([(PD_B1minus, PD_B1m_hires, [('out_file', 'in_file')]),
                (pd0, PD_B1m_hires, [('out_file', 'reference')]),
                (t1_B1minus, t1_B1m_hires, [('out_file', 'in_file')]),
                (t10, t1_B1m_hires, [('out_file', 'reference')]),
                (mt_B1minus, mt_B1m_hires, [('out_file', 'in_file')]),
                (mt0, mt_B1m_hires, [('out_file', 'reference')])])

    pd_cal = Node(ImageMaths(op_string='-div'),
                  name='pd_cal',
                  iterfield=['in_file'])
    t1_cal = Node(ImageMaths(op_string='-div'),
                  name='t1_cal',
                  iterfield=['in_file'])
    mt_cal = Node(ImageMaths(op_string='-div'),
                  name='mt_cal',
                  iterfield=['in_file'])
    wf.connect([(inputnode, pd_cal, [('PDw_file', 'in_file')]),
                (PD_B1m_hires, pd_cal, [('out_file', 'in_file2')]),
                (inputnode, t1_cal, [('T1w_file', 'in_file')]),
                (t1_B1m_hires, t1_cal, [('out_file', 'in_file2')]),
                (inputnode, mt_cal, [('MTw_file', 'in_file')]),
                (mt_B1m_hires, mt_cal, [('out_file', 'in_file2')])])

    t1_reg = Node(FLIRT(uses_qform=True, cost='mutualinfo'), name='t1_reg')
    mt_reg = Node(FLIRT(uses_qform=True, cost='mutualinfo'), name='mt_reg')
    t1_apply = Node(ApplyXfm4D(single_matrix=True), name='t1_apply')
    mt_apply = Node(ApplyXfm4D(single_matrix=True), name='mt_apply')
    wf.connect([(t10, t1_reg, [('out_file', 'in_file')]),
                (pd0, t1_reg, [('out_file', 'reference')]),
                (t1_cal, t1_apply, [('out_file', 'in_file')]),
                (pd0, t1_apply, [('out_file', 'ref_vol')]),
                (t1_reg, t1_apply, [('out_matrix_file', 'trans_file')]),
                (mt0, mt_reg, [('out_file', 'in_file')]),
                (pd0, mt_reg, [('out_file', 'reference')]),
                (mt_cal, mt_apply, [('out_file', 'in_file')]),
                (pd0, mt_apply, [('out_file', 'ref_vol')]),
                (mt_reg, mt_apply, [('out_matrix_file', 'trans_file')])])

    mpm = Node(MPMR2s(sequence=me_params, verbose=True), name='MPM_R2s')
    mtsat = Node(MTSat(sequence=mtsat_params, verbose=True), name='MPM_MTSat')

    wf.connect([(pd_cal, mpm, [('out_file', 'PDw_file')]),
                (t1_apply, mpm, [('out_file', 'T1w_file')]),
                (mt_apply, mpm, [('out_file', 'MTw_file')]),
                (bet, mpm, [('mask_file', 'mask_file')]),
                (mpm, mtsat, [('S0_PDw_map', 'PDw_file'),
                              ('S0_T1w_map', 'T1w_file'),
                              ('S0_MTw_map', 'MTw_file')]),
                (inputnode, mtsat, [('B1_map', 'B1_map')]),
                (bet, mtsat, [('mask_file', 'mask_file')]),
                (mpm, outputnode, [('R2s_map', 'R2s_map')]),
                (mtsat, outputnode, [('PD_map', 'PD_map'),
                                     ('R1_map', 'R1_map'),
                                     ('delta_map', 'mtsat_map')])])
    return wf
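A wiring sketch for the workflow above, assuming me_params and mtsat_params are sequence dictionaries in whatever form the QUIT MPMR2s and MTSat interfaces expect (their structure is not shown in this example); all file names are hypothetical.

wf = init_mpm_wf(me_params, mtsat_params)
wf.inputs.inputnode.PDw_file = 'PDw.nii.gz'
wf.inputs.inputnode.T1w_file = 'T1w.nii.gz'
wf.inputs.inputnode.MTw_file = 'MTw.nii.gz'
wf.inputs.inputnode.PDw_cal = 'PDw_cal.nii.gz'
wf.inputs.inputnode.T1w_cal = 'T1w_cal.nii.gz'
wf.inputs.inputnode.MTw_cal = 'MTw_cal.nii.gz'
wf.inputs.inputnode.B1_map = 'B1_map.nii.gz'
wf.run()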
Exemple #27
0
import nipype.interfaces.fsl as fsl
from nipype.interfaces.fsl import ImageStats
from nipype.interfaces.fsl import ImageMaths
import numpy as np
from scipy import stats

sublist = range(101,123) # subject nr list 
seglist = range(1,11) # segments 1 to 10; in this example there are 10 targets


# make all the segments first
for i in sublist:
    for j in seglist: 

      #get segment (note out_file contains L because this example is left striatum only)
      ImageMaths(in_file=('ITC%sbig.nii.gz' % i), out_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1" : j, "2" : i}), op_string=('-thr %(1)s -uthr %(2)s' % {"1" : j, "2" : j})).run()
    
      # binarize
      ImageMaths(in_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1" : j, "2" : i}), out_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1" : j, "2" : i}), op_string=("-bin")).run()

# now calc average DICE per segment per subject
for i in sublist:
    for j in seglist: 
      # get volume A
      stats = ImageStats(in_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1" : j, "2" : i}), op_string='-V').run()
      volume_A = stats.outputs.out_stat[1]
      
      # get the intersection of this segment with the same segment in every other subject
      # first make a sublist of participants not including self
      sect_list = [a for a in sublist if a != i]
      Dice_list = [0] * len(sect_list)
Exemple #28
0
def get_wf_tissue_masks(name='wf_tissue_masks'):
    '''
    This function returns a workflow that resamples the T1 brains, extracts
    the tissue types, thresholds them at 0.5, and registers them to T2* space.
    It then registers the tissue priors to the T2* space and performs a
    bitwise AND between the two maps.
    '''
    # csf_tissue_prior_path, gm_tissue_prior_path, wm_tissue_prior_path,
    # threshold = 0.5

    wf_tissue_masks = Workflow(name=name)

    inputspec = Node(IdentityInterface(fields=[
        'resampled_anat_file_path', 'func2anat_mat_path', 'std2func_mat_path',
        'reference_func_file_path', 'brain_mask_eroded', 'threshold'
    ]),
                     name="inputspec")

    # FSL FAST node to segment the T1 brain
    fast = Node(FAST(out_basename='fast_'), name='fast')
    # probability_maps=True,segments=True,
    wf_tissue_masks.connect(inputspec, 'resampled_anat_file_path', fast,
                            'in_files')

    #  Invert the func2anat matrix to get anat2func
    inv_mat = Node(ConvertXFM(invert_xfm=True), name='inv_mat')
    wf_tissue_masks.connect(inputspec, 'func2anat_mat_path', inv_mat,
                            'in_file')

    # Transform the above segmented tissue masks to the functional space using the inverse matrix
    anat2func_xform_csf = Node(FLIRT(output_type='NIFTI',
                                     apply_xfm=True,
                                     interp='sinc'),
                               name='anat2func_xform_csf')

    wf_tissue_masks.connect(inputspec, 'reference_func_file_path',
                            anat2func_xform_csf, 'reference')
    wf_tissue_masks.connect(inv_mat, 'out_file', anat2func_xform_csf,
                            'in_matrix_file')

    anat2func_xform_wm = Node(FLIRT(output_type='NIFTI',
                                    apply_xfm=True,
                                    interp='sinc'),
                              name='anat2func_xform_wm')
    wf_tissue_masks.connect(inputspec, 'reference_func_file_path',
                            anat2func_xform_wm, 'reference')
    wf_tissue_masks.connect(inv_mat, 'out_file', anat2func_xform_wm,
                            'in_matrix_file')

    std2func_xform_eroded_brain = Node(FLIRT(output_type='NIFTI',
                                             apply_xfm=True,
                                             interp='nearestneighbour'),
                                       name='std2func_xform_eroded_brain')
    wf_tissue_masks.connect(inputspec, 'reference_func_file_path',
                            std2func_xform_eroded_brain, 'reference')
    wf_tissue_masks.connect(inputspec, 'std2func_mat_path',
                            std2func_xform_eroded_brain, 'in_matrix_file')

    def select_item_from_array(arr, index=0):
        import numpy as np
        arr = np.array(arr)
        return arr[index]

    wf_tissue_masks.connect(
        fast, ('partial_volume_files', select_item_from_array, 0),
        anat2func_xform_csf, 'in_file')
    wf_tissue_masks.connect(
        fast, ('partial_volume_files', select_item_from_array, 2),
        anat2func_xform_wm, 'in_file')
    wf_tissue_masks.connect(inputspec, 'brain_mask_eroded',
                            std2func_xform_eroded_brain, 'in_file')

    # Threshold

    def get_opstring(threshold):
        op = '-thr ' + str(threshold) + ' -bin'
        return op
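    # e.g. get_opstring(0.5) returns '-thr 0.5 -bin'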

    # print(inputspec.outputs)
    # ----- CSF ------

    threshold_csf = Node(interface=ImageMaths(suffix='_thresh'),
                         name='threshold_csf')
    # threshold_csf.inputs.op_string = '-thresh '+str(inputspec.outputs.threshold)+' -bin'
    wf_tissue_masks.connect(inputspec, ('threshold', get_opstring),
                            threshold_csf, 'op_string')
    wf_tissue_masks.connect(anat2func_xform_csf, 'out_file', threshold_csf,
                            'in_file')

    # ------- GM --------

    # threshold_gm = Node(interface=ImageMaths(op_string='-thresh',
    #                                             suffix='_thresh'),
    #                    name='threshold_gm')
    #
    #
    # wf_tissue_priors.connect(inputspec, ('threshold', get_opstring), threshold_gm, 'op_string' )
    # wf_tissue_priors.connect(fast, partial_volume_map[1], threshold_gm, 'in_file')
    #
    # -------- WM --------

    threshold_wm = Node(interface=ImageMaths(suffix='_thresh'),
                        name='threshold_wm')
    wf_tissue_masks.connect(inputspec, ('threshold', get_opstring),
                            threshold_wm, 'op_string')
    wf_tissue_masks.connect(anat2func_xform_wm, 'out_file', threshold_wm,
                            'in_file')

    #  -------------------

    #
    # wf_tissue_masks.connect(threshold_csf, 'out_file', std2func_xform_csf, 'in_file')
    # wf_tissue_masks.connect(threshold_wm, 'out_file', std2func_xform_wm, 'in_file')

    # Masking the outer brain CSF

    csf_mask = Node(interface=ApplyMask(), name='csf_mask')
    wf_tissue_masks.connect(threshold_csf, 'out_file', csf_mask, 'in_file')
    wf_tissue_masks.connect(std2func_xform_eroded_brain, 'out_file', csf_mask,
                            'mask_file')

    # Masking the outer brain WM that might be present due to poor BET

    wm_mask = Node(interface=ApplyMask(), name='wm_mask')
    wf_tissue_masks.connect(threshold_wm, 'out_file', wm_mask, 'in_file')
    wf_tissue_masks.connect(std2func_xform_eroded_brain, 'out_file', wm_mask,
                            'mask_file')

    # wm_mask = Node(interface=ApplyMask(),
    #                    name='wm_mask')
    # wf_tissue_masks.connect(std2func_xform_wm, 'out_file', wm_mask, 'in_file')
    # wf_tissue_masks.connect(std2func_xform_wm_prior, 'out_file', wm_mask, 'mask_file')

    outputspec = Node(IdentityInterface(fields=['csf_mask', 'wm_mask']),
                      name="outputspec")

    wf_tissue_masks.connect(csf_mask, 'out_file', outputspec, 'csf_mask')
    # wf_tissue_priors.connect(threshold_gm, 'out_file', outputspec, 'gm_tissue_prior_path')
    wf_tissue_masks.connect(wm_mask, 'out_file', outputspec, 'wm_mask')

    return wf_tissue_masks
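A usage sketch with hypothetical file names; threshold is forwarded to get_opstring, so 0.5 reproduces the cutoff described in the docstring.

wf = get_wf_tissue_masks()
wf.inputs.inputspec.resampled_anat_file_path = 'anat_resampled.nii'
wf.inputs.inputspec.func2anat_mat_path = 'func2anat.mat'
wf.inputs.inputspec.std2func_mat_path = 'std2func.mat'
wf.inputs.inputspec.reference_func_file_path = 'example_func.nii'
wf.inputs.inputspec.brain_mask_eroded = 'MNI152_T1_2mm_brain_mask_eroded.nii.gz'
wf.inputs.inputspec.threshold = 0.5
wf.run()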
Exemple #29
0
def init_hcp_segment_anat_wf(name='hcp_segment_anat_wf'):
    """
    This workflow generates WM, CSF and GM masks using the same 
    pipeline from the HCP pipeline. The wm and csf masks are created by 
    importing the appropriate freesurfer parcellations from the wmparc
    file and subtracting out the voxel marked as gm.

    **Paramters**

        name
            Name for the workflow hierarchy of Niype

    **Inputs**
        brainmask_fs
            freesurfer brain in MNI space with anat dims
        l_atlasroi
            left vertex mask
        l_midthickness
            left midthickness surface
        l_white
            left white matter surface
        l_pial
            left pial surface
        r_atlasroi
            right vertex mask
        r_midthickness
            right midthickness surface
        r_white
            right white matter surface
        r_pial
            right pial surface
        wmparc
            gyral white matter segmentation generated by freesurfer, warped
            into MNI space
        ROIs
            subcortical parcellation of brain warped into MNI space

    **Outputs**
        brain_gm_mask
            whole brain gm mask dilated twice in MNI 2mm space brainmasked
        cortical_gm_mask
            cortical gm mask in MNI 2mm space brainmasked
        subcortical_gm_mask
            subcortical gm mask in MNI 2mm space brainmasked
        wm_mask
            white matter mask in MNI 2mm space brainmasked
        csf_mask
            csf mask in MNI 2mm space brainmasked
        
    """
    wf = Workflow(name=name)
    with pkg_resources.path(data, 'FreeSurferCSFRegLut.txt') as tmp:
        csf_rois = str(tmp)
    with pkg_resources.path(data, 'FreeSurferWMRegLut.txt') as tmp:
        wm_rois = str(tmp)
    with pkg_resources.path(data, 'fsl_identity.mat') as tmp:
        fsl_identity = str(tmp)

    inputnode = Node(IdentityInterface(
        fields=['brainmask_fs', 'wmparc', 'l_atlasroi', 'l_midthickness', 'l_white', 
                'l_pial', 'r_atlasroi', 'r_midthickness', 'r_white', 'r_pial', 
                'ROIs']),
        name='inputnode')
    outputnode = Node(IdentityInterface(
        fields=['cort_gm_mask', 'subcort_gm_mask', 'gm_mask', 
                'wm_mask', 'csf_mask']),
        name='outputnode')

    # resample gm mask to 2mm MNI space
    resample_mask = Node(ApplyWarp(relwarp=True, interp='nn', 
                         premat=fsl_identity, out_file='brainmask_fs.2.nii.gz'),
                    name='resample_mask')

    # gm mask nodes
    l_gm_mask = Node(wb.MetricToVolumeMappingRC(out_file='l_gm_mask.nii.gz'), 
                     name='l_gm_mask')
    r_gm_mask = Node(wb.MetricToVolumeMappingRC(out_file='r_gm_mask.nii.gz'),
                     name='r_gm_mask')
    cort_gm = Node(ImageMaths(out_file='cortical_gm.nii.gz',
                              op_string='-add',
                              args='-bin'),
                   name='cort_gm')
    cort_gm_mask = Node(ImageMaths(out_file='cortical_gm_mask.nii.gz',
                                   op_string='-mul'),
                        name='cort_gm_mask')
    subcort_gm_mask = Node(ImageMaths(out_file='subcortical_gm_mask.nii.gz',
                                      op_string='-mul',
                                      args='-bin'),
                           name='subcort_gm_mask')
    brain_gm_mask = Node(ImageMaths(out_file='brain_gm_mask.nii.gz',
                                    op_string='-add',
                                    args='-dilD -dilD'),
                         name='brain_gm_mask')

    # wm mask nodes
    wm_vol = Node(wb.VolumeLabelImport(out_file='wm_vol.nii.gz',
                                       label_list_file=wm_rois,
                                       discard_others=True,
                                       drop_unused_labels=True),
                  name='wm_vol')
    wm = Node(ImageMaths(out_file='wm.nii.gz',
                         op_string='-bin -sub',
                         args='-bin'),
              name='wm')
    wm_mask = Node(ImageMaths(out_file='wm_mask.nii.gz',
                              op_string='-mul',
                              args='-bin'),
                    name='wm_mask')
    
    # csf mask nodes
    csf_vol = Node(wb.VolumeLabelImport(out_file='csf_vol.nii.gz',
                                        label_list_file=csf_rois,
                                        discard_others=True,
                                        drop_unused_labels=True),
                   name='csf_vol')
    csf = Node(ImageMaths(out_file='csf.nii.gz',
                          op_string='-bin -sub',
                          args='-bin'),
               name='csf')
    csf_mask = Node(ImageMaths(out_file='csf_mask.nii.gz',
                               op_string='-mul',
                               args='-bin'),
                    name='csf_mask')


    wf.connect([
        # gm
        (inputnode, resample_mask, [('brainmask_fs', 'in_file'),
                                    ('wmparc', 'ref_file')]),
        (inputnode, l_gm_mask, [('l_atlasroi', 'in_file'),
                                ('l_midthickness', 'surface'),
                                ('wmparc', 'volume_space'),
                                ('l_pial', 'inner_surf'),
                                ('l_white', 'outer_surf')]),
        (inputnode, r_gm_mask, [('r_atlasroi', 'in_file'),
                                ('r_midthickness', 'surface'),
                                ('wmparc', 'volume_space'),
                                ('r_pial', 'inner_surf'),
                                ('r_white', 'outer_surf')]),
        (l_gm_mask, cort_gm, [('out_file', 'in_file')]),
        (r_gm_mask, cort_gm, [('out_file', 'in_file2')]),
        (cort_gm, cort_gm_mask, [('out_file', 'in_file')]),
        (resample_mask, cort_gm_mask, [('out_file', 'in_file2')]),
        (inputnode, subcort_gm_mask, [('ROIs', 'in_file')]),
        (resample_mask, subcort_gm_mask, [('out_file', 'in_file2')]),
        (cort_gm_mask, brain_gm_mask, [('out_file', 'in_file')]),
        (subcort_gm_mask, brain_gm_mask, [('out_file', 'in_file2')]),
        # wm
        (inputnode, wm_vol, [('wmparc', 'in_file')]),
        (wm_vol, wm, [('out_file', 'in_file')]),
        (brain_gm_mask, wm, [('out_file', 'in_file2')]),
        (wm, wm_mask, [('out_file', 'in_file')]),
        (resample_mask, wm_mask, [('out_file', 'in_file2')]),
        # csf
        (inputnode, csf_vol, [('wmparc', 'in_file')]),
        (csf_vol, csf, [('out_file', 'in_file')]),
        (brain_gm_mask, csf, [('out_file', 'in_file2')]),
        (csf, csf_mask, [('out_file', 'in_file')]),
        (resample_mask, csf_mask, [('out_file', 'in_file2')]),
        # output
        (cort_gm_mask, outputnode, [('out_file', 'cort_gm_mask')]),
        (subcort_gm_mask, outputnode, [('out_file', 'subcort_gm_mask')]),
        (brain_gm_mask, outputnode, [('out_file', 'brain_gm_mask')]),
        (wm_mask, outputnode, [('out_file', 'wm_mask')]),
        (csf_mask, outputnode, [('out_file', 'csf_mask')])
    ])

    return wf
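A minimal usage sketch for the workflow above; the file names are illustrative HCP-style placeholders, not values taken from the original code:

def run_hcp_segment_example():
    # Build the workflow and feed the inputnode with HCP-style files (placeholders).
    wf = init_hcp_segment_anat_wf(name='hcp_segment_anat_wf')
    wf.inputs.inputnode.brainmask_fs = 'brainmask_fs.2.nii.gz'
    wf.inputs.inputnode.wmparc = 'wmparc.2.nii.gz'
    wf.inputs.inputnode.ROIs = 'ROIs.2.nii.gz'
    wf.inputs.inputnode.l_atlasroi = 'L.atlasroi.32k_fs_LR.shape.gii'
    wf.inputs.inputnode.l_midthickness = 'L.midthickness.32k_fs_LR.surf.gii'
    wf.inputs.inputnode.l_white = 'L.white.32k_fs_LR.surf.gii'
    wf.inputs.inputnode.l_pial = 'L.pial.32k_fs_LR.surf.gii'
    wf.inputs.inputnode.r_atlasroi = 'R.atlasroi.32k_fs_LR.shape.gii'
    wf.inputs.inputnode.r_midthickness = 'R.midthickness.32k_fs_LR.surf.gii'
    wf.inputs.inputnode.r_white = 'R.white.32k_fs_LR.surf.gii'
    wf.inputs.inputnode.r_pial = 'R.pial.32k_fs_LR.surf.gii'
    return wf.run()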
Exemple #30
0
def functional_per_participant_test():
	import os
	from nipype.interfaces import ants
	from nipype.interfaces.fsl import BET, ImageMaths

	for i in ["","_aF","_cF1","_cF2","_pF"]:
		template = os.path.expanduser("~/ni_data/templates/ds_QBI_chr.nii.gz")
		participant = "4008"
		# expanduser() is needed here: os.listdir() does not expand "~" itself
		image_dir = os.path.expanduser("~/ni_data/ofM.dr/preprocessing/generic_work/_subject_session_{}.ofM{}/_scan_type_7_EPI_CBV/temporal_mean/".format(participant,i))
		try:
			for myfile in os.listdir(image_dir):
				if myfile.endswith(".nii.gz"):
					mimage = os.path.join(image_dir,myfile)
		except FileNotFoundError:
			pass
		else:
			n4 = ants.N4BiasFieldCorrection()
			n4.inputs.dimension = 3
			n4.inputs.input_image = mimage
			n4.inputs.bspline_fitting_distance = 100
			n4.inputs.shrink_factor = 2
			n4.inputs.n_iterations = [200,200,200,200]
			n4.inputs.convergence_threshold = 1e-11
			n4.inputs.output_image = 'n4_{}_ofM{}.nii.gz'.format(participant,i)
			n4_res = n4.run()

			functional_cutoff = ImageMaths()
			functional_cutoff.inputs.op_string = "-thrP 30"
			functional_cutoff.inputs.in_file = n4_res.outputs.output_image
			functional_cutoff_res = functional_cutoff.run()

			functional_BET = BET()
			functional_BET.inputs.mask = True
			functional_BET.inputs.frac = 0.5
			functional_BET.inputs.in_file = functional_cutoff_res.outputs.out_file
			functional_BET_res = functional_BET.run()

			registration = ants.Registration()
			registration.inputs.fixed_image = template
			registration.inputs.output_transform_prefix = "output_"
			# Three stages (an initial Rigid stage is assumed) so that the
			# per-stage parameter lists below, which all have three entries,
			# line up with the number of transforms.
			registration.inputs.transforms = ['Rigid', 'Affine', 'SyN']
			registration.inputs.transform_parameters = [(0.1,), (0.1,), (3.0, 3.0, 5.0)]
			registration.inputs.number_of_iterations = [[10000, 10000, 10000]] * 2 + [[100, 100, 100]]
			registration.inputs.dimension = 3
			registration.inputs.write_composite_transform = True
			registration.inputs.collapse_output_transforms = True
			registration.inputs.initial_moving_transform_com = True
			registration.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
			registration.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
			registration.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
			registration.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
			registration.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
			registration.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
			registration.inputs.convergence_window_size = [20] * 2 + [5]
			registration.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
			registration.inputs.sigma_units = ['vox'] * 3
			registration.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
			registration.inputs.use_estimate_learning_rate_once = [True] * 3
			registration.inputs.use_histogram_matching = [False] * 2 + [True]
			registration.inputs.winsorize_lower_quantile = 0.005
			registration.inputs.winsorize_upper_quantile = 0.995
			registration.inputs.args = '--float'
			registration.inputs.num_threads = 4
			registration.plugin_args = {'qsub_args': '-pe orte 4', 'sbatch_args': '--mem=6G -c 4'}

			registration.inputs.moving_image = functional_BET_res.outputs.out_file
			registration.inputs.output_warped_image = '{}_ofM{}.nii.gz'.format(participant,i)
			res = registration.run()
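			# Hedged follow-up sketch, not part of the original function:
			# because write_composite_transform=True, the run above also
			# yields res.outputs.composite_transform, which can be reused to
			# map other images from the same moving space onto the template;
			# 'other_epi.nii.gz' is a placeholder file name.
			apply = ants.ApplyTransforms()
			apply.inputs.dimension = 3
			apply.inputs.input_image = 'other_epi.nii.gz'
			apply.inputs.reference_image = template
			apply.inputs.transforms = [res.outputs.composite_transform]
			apply.inputs.interpolation = 'BSpline'
			apply_res = apply.run()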