Example #1
    def _run_interface(self, runtime):
        out_file = self._gen_outfilename()
        src_file = self.inputs.src_file
        ref_file = self.inputs.ref_file

        # Collect orientation info

        # "orientation" => 3 letter acronym defining orientation
        src_orient = fs.utils.ImageInfo(
            in_file=src_file).run().outputs.orientation
        ref_orient = fs.utils.ImageInfo(
            in_file=ref_file).run().outputs.orientation
        # "convention" => RADIOLOGICAL/NEUROLOGICAL
        src_conv = cmp_fsl.Orient(in_file=src_file,
                                  get_orient=True).run().outputs.orient
        ref_conv = cmp_fsl.Orient(in_file=ref_file,
                                  get_orient=True).run().outputs.orient

        if src_orient == ref_orient:
            # no reorientation needed
            print "No reorientation needed for anatomical image; Copy only!"
            copyfile(src_file, out_file, False, False, 'content')
            return runtime
        else:
            if src_conv != ref_conv:
                # if needed, match convention (radiological/neurological) to reference
                tmpsrc = os.path.join(os.path.dirname(src_file),
                                      'tmp_' + os.path.basename(src_file))

                fsl.SwapDimensions(in_file=src_file,
                                   new_dims=('-x', 'y', 'z'),
                                   out_file=tmpsrc).run()

                cmp_fsl.Orient(in_file=tmpsrc, swap_orient=True).run()
            else:
                # If conventions match, just use the original source
                tmpsrc = src_file

        tmp2 = os.path.join(os.path.dirname(src_file), 'tmp.nii.gz')
        map_orient = {
            'L': 'RL',
            'R': 'LR',
            'A': 'PA',
            'P': 'AP',
            'S': 'IS',
            'I': 'SI'
        }
        fsl.SwapDimensions(in_file=tmpsrc,
                           new_dims=(map_orient[ref_orient[0]],
                                     map_orient[ref_orient[1]],
                                     map_orient[ref_orient[2]]),
                           out_file=tmp2).run()

        shutil.move(tmp2, out_file)

        # Only remove the temporary file if the conventions did not match.  Otherwise,
        # we end up removing the output.
        if tmpsrc != src_file:
            os.remove(tmpsrc)
        return runtime
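The snippet above queries each image's orientation with FreeSurfer's mri_info wrapper and then uses fslswapdim to match conventions. Below is a minimal standalone sketch of that core check-and-flip, assuming nipype with FreeSurfer and FSL available; the file names are placeholders.

import nipype.interfaces.freesurfer as fs
import nipype.interfaces.fsl as fsl

src_file = 'src.nii.gz'  # placeholder input
ref_file = 'ref.nii.gz'  # placeholder reference

# Query the three-letter orientation code (e.g. 'LAS') of each image.
src_orient = fs.utils.ImageInfo(in_file=src_file).run().outputs.orientation
ref_orient = fs.utils.ImageInfo(in_file=ref_file).run().outputs.orientation

if src_orient != ref_orient:
    # Flip along x, as the snippet above does when the conventions differ.
    fsl.SwapDimensions(in_file=src_file,
                       new_dims=('-x', 'y', 'z'),
                       out_file='src_flipped.nii.gz').run()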
Example #2
def _create_mapnode_reorient_pipeline(name="reorient_pipe",
                                      new_dims=("x", "z", "-y")):
    """
    By kepkee:
    fslswapdim image_bad x z -y image_good
    fslorient -deleteorient image_good.nii.gz;
    fslorient -setqformcode 1 image_good.nii.gz
    """
    # creating pipeline
    reorient_pipe = pe.Workflow(name=name)

    # Creating input node
    inputnode = pe.Node(niu.IdentityInterface(fields=['list_img']),
                        name='inputnode')

    swap_dim = pe.MapNode(fsl.SwapDimensions(new_dims=new_dims),
                          name="swap_dim",
                          iterfield=["in_file"])
    reorient_pipe.connect(inputnode, 'list_img', swap_dim, 'in_file')

    deorient = pe.MapNode(FslOrient(main_option="deleteorient"),
                          name="deorient",
                          iterfield=["in_file"])
    reorient_pipe.connect(swap_dim, 'out_file', deorient, 'in_file')

    reorient = pe.MapNode(FslOrient(main_option="setqformcode", code=1),
                          name="reorient",
                          iterfield=["in_file"])
    reorient_pipe.connect(deorient, 'out_file', reorient, 'in_file')

    return reorient_pipe
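A hedged usage sketch for the pipeline factory above; the image path and working directory are placeholders, and the imports (pe, niu, fsl, FslOrient) are assumed to be the same ones the example already relies on.

# Build the MapNode reorientation pipeline and feed it a list of images.
reorient_pipe = _create_mapnode_reorient_pipeline(name='reorient_pipe',
                                                  new_dims=('x', 'z', '-y'))
reorient_pipe.inputs.inputnode.list_img = ['sub-01_T1w.nii.gz']  # placeholder
reorient_pipe.base_dir = '/tmp/reorient_work'                    # placeholder
reorient_pipe.run()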
Example #3
def vmhc(wf, cfg, strat_pool, pipe_num, opt=None):
    '''Compute Voxel-Mirrored Homotopic Connectivity.

    VMHC is the map of brain functional homotopy, the high degree of
    synchrony in spontaneous activity between geometrically corresponding
    interhemispheric (i.e., homotopic) regions.

    Node Block:
    {"name": "vmhc",
     "config": ["voxel_mirrored_homotopic_connectivity"],
     "switch": ["run"],
     "option_key": "None",
     "option_val": "None",
     "inputs": [["space-symtemplate_desc-cleaned-sm_bold",
                 "space-symtemplate_desc-brain-sm_bold",
                 "space-symtemplate_desc-preproc-sm_bold",
                 "space-symtemplate_desc-sm_bold"]],
     "outputs": ["vmhc"]}
    '''

    # write out a copied and L/R-swapped version of the file
    copy_and_L_R_swap = pe.Node(interface=fsl.SwapDimensions(),
                                name=f'copy_and_L_R_swap_{pipe_num}',
                                mem_gb=3.0)

    copy_and_L_R_swap.inputs.new_dims = ('-x', 'y', 'z')

    node, out = strat_pool.get_data([
        "space-symtemplate_desc-cleaned-sm_bold",
        "space-symtemplate_desc-brain-sm_bold",
        "space-symtemplate_desc-preproc-sm_bold",
        "space-symtemplate_desc-sm_bold"
    ])
    wf.connect(node, out, copy_and_L_R_swap, 'in_file')

    # calculate correlation between original and swapped images
    pearson_correlation = pe.Node(interface=preprocess.TCorrelate(),
                                  name=f'pearson_correlation_{pipe_num}',
                                  mem_gb=3.0)

    pearson_correlation.inputs.pearson = True
    pearson_correlation.inputs.polort = -1
    pearson_correlation.inputs.outputtype = 'NIFTI_GZ'

    wf.connect(node, out, pearson_correlation, 'xset')

    wf.connect(copy_and_L_R_swap, 'out_file', pearson_correlation, 'yset')

    outputs = {'vmhc': (pearson_correlation, 'out_file')}

    return (wf, outputs)
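Stripped of the C-PAC node-block plumbing (strat_pool, pipe_num), the VMHC computation is just a left-right flip followed by a voxelwise Pearson correlation. A minimal sketch under that assumption, with a placeholder input already resampled to a symmetric template:

import nipype.pipeline.engine as pe
import nipype.interfaces.fsl as fsl
from nipype.interfaces.afni import preprocess

func = 'func_in_symmetric_template.nii.gz'  # placeholder input

wf = pe.Workflow(name='vmhc_core', base_dir='/tmp/vmhc_work')  # placeholder base_dir

# Mirror the functional series across the midline (x axis).
swap = pe.Node(fsl.SwapDimensions(new_dims=('-x', 'y', 'z')), name='swap')
swap.inputs.in_file = func

# Voxelwise Pearson correlation between the original and mirrored series.
corr = pe.Node(preprocess.TCorrelate(pearson=True, polort=-1,
                                     outputtype='NIFTI_GZ'),
               name='corr')
corr.inputs.xset = func

wf.connect(swap, 'out_file', corr, 'yset')
wf.run()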
Example #4
 def average_template_( self, Template_4D, List, Template ):
     """Merge the list in a 4D image; average the 4D imge in a 3D image; flip the image and and average the flipped and unflipped iamges."""
     try:
         #
         #
         # merge tissues in a 4D file
         merger = fsl.Merge()
         merger.inputs.in_files     =  List
         merger.inputs.dimension    = 't'
         merger.inputs.output_type  = 'NIFTI_GZ'
         merger.inputs.merged_file  =  Template_4D
         merger.run()
         # average over frames
         maths = fsl.ImageMaths( in_file   =  Template_4D, 
                                 op_string = '-Tmean', 
                                 out_file  =  Template )
         maths.run()
         # Flip the frames
         swap = fsl.SwapDimensions()
         swap.inputs.in_file   = Template
         swap.inputs.new_dims  = ("-x","y","z")
         swap.inputs.out_file  = "%s_flipped.nii.gz"%Template[:-7]
         swap.run()
         # average the original and flipped templates: (Template + flipped) / 2
         maths = fsl.ImageMaths( in_file   =  Template,
                                 op_string = '-add %s_flipped.nii.gz -div 2'%Template[:-7],
                                 out_file  =  Template )
         maths.run()
     #
     #
     except IOError as e:
         # handle the more specific IOError before the generic Exception
         print("I/O error({0}): {1}".format(e.errno, e.strerror))
         quit(-1)
     except Exception as inst:
         print(inst)
         _log.error(inst)
         quit(-1)
     except:
         print("Unexpected error:", sys.exc_info()[0])
         quit(-1)
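In isolation, the flip-and-average step amounts to one fslswapdim call and one fslmaths call; a small sketch assuming FSL is available, with placeholder file names.

import nipype.interfaces.fsl as fsl

template = 'template.nii.gz'          # placeholder path
flipped = 'template_flipped.nii.gz'   # placeholder path

# Mirror the template across x.
fsl.SwapDimensions(in_file=template,
                   new_dims=('-x', 'y', 'z'),
                   out_file=flipped).run()

# Average the original and mirrored images: (template + flipped) / 2.
fsl.ImageMaths(in_file=template,
               op_string='-add %s -div 2' % flipped,
               out_file='template_symmetric.nii.gz').run()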
Example #5
def legacy(
    bids_base,
    template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified, matching entries will be excluded from the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, str
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
	out_base : str, optional
		Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is default, since it works safely. Use others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
			The `nipype.interfaces.ants.legacy` module was not found on this system.
			You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
			https://github.com/nipy/nipype/issues/3197
		''')

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
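A hedged call sketch for the workflow above, using only parameters documented in its docstring; all paths and the smoothing value are placeholders.

# Placeholder paths and values; adjust to your data set and template.
legacy(
    bids_base='/data/bids_root',        # placeholder BIDS root
    template='/data/template.nii.gz',   # placeholder registration template
    realign='time',                     # slice-timing correction via fsl.SliceTimer
    functional_blur_xy=0.4,             # placeholder in-plane smoothing factor
    keep_work=False,
    workflow_name='legacy',
)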
Example #6
def create_vmhc(use_ants, name='vmhc_workflow', ants_threads=1):
    """
    Compute the map of brain functional homotopy, the high degree of synchrony in spontaneous activity between geometrically corresponding interhemispheric (i.e., homotopic) regions.



    Parameters
    ----------

    use_ants : boolean
        Whether to apply the functional-to-symmetric-standard transform with ANTS (True) or FSL ApplyWarp (False).
    name : string, optional
        Name of the workflow.
    ants_threads : int, optional
        Number of threads to use for the ANTS warp application.

    Returns
    -------

    vmhc_workflow : workflow

        Voxel Mirrored Homotopic Connectivity Analysis Workflow



    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/vmhc/vmhc.py>`_ 

    Workflow Inputs::

        inputspec.brain : string (existing nifti file)
            Anatomical image (without skull)

        inputspec.symmetric_brain : string (existing nifti file)
            MNI152_T1_2mm_symmetric_brain.nii.gz
 
        inputspec.rest_res_filt : string (existing nifti file)
            Band-passed image with nuisance signal regressed out (and optionally scrubbed). Recommended bandpass filter: (0.001, 0.1)

        inputspec.reorient : string (existing nifti file)
            RPI oriented anatomical data

        inputspec.example_func2highres_mat : string (existing affine transformation .mat file)
            Specifies an affine transform that should be applied to the example_func before non linear warping

        inputspec.standard_for_func: string (existing nifti file)
            MNI152_T1_standard_resolution_brain.nii.gz

        inputspec.symmetric_skull : string (existing nifti file)
            MNI152_T1_2mm_symmetric.nii.gz

        inputspec.twomm_brain_mask_dil : string (existing nifti file)
            MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz

        inputspec.config_file_twomm_symmetric : string (existing .cnf file)
            T1_2_MNI152_2mm_symmetric.cnf

        inputspec.rest_mask : string (existing nifti file)
            A functional mask volume (derived by dilation from the motion-corrected functional volume)

        fwhm_input.fwhm : list (float) 
            For spatial smoothing the Z-transformed correlations in MNI space.
            Generally the value of this parameter is 1.5 or 2 times the voxel size of the input Image.

        inputspec.mean_functional : string (existing nifti file)
            The mean functional image for use in the func-to-anat registration matrix conversion
            to ITK (ANTS) format, if the user selects to use ANTS.

        
    Workflow Outputs::

        outputspec.highres2symmstandard : string (nifti file)
            Linear registration of T1 image to symmetric standard image

        outputspec.highres2symmstandard_mat : string (affine transformation .mat file)
            An affine transformation .mat file from linear registration and used in non linear registration

        outputspec.highres2symmstandard_warp : string (nifti file)
            warp file from Non Linear registration of T1 to symmetrical standard brain

        outputspec.fnirt_highres2symmstandard : string (nifti file)
            Non Linear registration of T1 to symmetrical standard brain

        outputspec.highres2symmstandard_jac : string (nifti file)
            jacobian determinant image from Non Linear registration of T1 to symmetrical standard brain

        outputspec.rest_res_2symmstandard : string (nifti file)
            nonlinear registration (func to standard) image

        outputspec.VMHC_FWHM_img : string (nifti file)
            pearson correlation between res2standard and flipped res2standard

        outputspec.VMHC_Z_FWHM_img : string (nifti file)
            Fisher Z transform map

        outputspec.VMHC_Z_stat_FWHM_img : string (nifti file)
            Z statistic map

    Order of commands:

    - Perform linear registration of Anatomical brain in T1 space to symmetric standard space. For details see `flirt <http://www.fmrib.ox.ac.uk/fsl/flirt/index.html>`_::

        flirt
        -ref MNI152_T1_2mm_symmetric_brain.nii.gz
        -in mprage_brain.nii.gz
        -out highres2symmstandard.nii.gz
        -omat highres2symmstandard.mat
        -cost corratio
        -searchcost corratio
        -dof 12
        -interp trilinear    
        
    - Perform nonlinear registration (highres to standard) to symmetric standard brain. For details see `fnirt <http://fsl.fmrib.ox.ac.uk/fsl/fnirt/>`_::
    
        fnirt
        --in=head.nii.gz
        --aff=highres2symmstandard.mat
        --cout=highres2symmstandard_warp.nii.gz
        --iout=fnirt_highres2symmstandard.nii.gz
        --jout=highres2symmstandard_jac.nii.gz
        --config=T1_2_MNI152_2mm_symmetric.cnf
        --ref=MNI152_T1_2mm_symmetric.nii.gz
        --refmask=MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz
        --warpres=10,10,10 

    - Perform spatial smoothing on the input functional image (inputspec.rest_res_filt).  For details see `PrinciplesSmoothing <http://imaging.mrc-cbu.cam.ac.uk/imaging/PrinciplesSmoothing>`_ `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm>`_::

        fslmaths rest_res_filt.nii.gz
        -kernel gauss FWHM / sqrt(8 * ln(2))
        -fmean -mas rest_mask.nii.gz
        rest_res_filt_FWHM.nii.gz
        
    - Apply nonlinear registration (func to standard). For details see  `applywarp <http://www.fmrib.ox.ac.uk/fsl/fnirt/warp_utils.html#applywarp>`_::
        
        applywarp
        --ref=MNI152_T1_2mm_symmetric.nii.gz
        --in=rest_res_filt_FWHM.nii.gz
        --out=rest_res_2symmstandard.nii.gz
        --warp=highres2symmstandard_warp.nii.gz
        --premat=example_func2highres.mat
        
        
    - Copy and L/R swap the output of applywarp command (rest_res_2symmstandard.nii.gz). For details see  `fslswapdim <http://fsl.fmrib.ox.ac.uk/fsl/fsl4.0/avwutils/index.html>`_::

        fslswapdim
        rest_res_2symmstandard.nii.gz
        -x y z
        tmp_LRflipped.nii.gz


    - Calculate the Pearson correlation between rest_res_2symmstandard.nii.gz and the flipped rest_res_2symmstandard.nii.gz (tmp_LRflipped.nii.gz). For details see `3dTcorrelate <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dTcorrelate.html>`_::
        
        3dTcorrelate
        -pearson
        -polort -1
        -prefix VMHC_FWHM.nii.gz
        rest_res_2symmstandard.nii.gz
        tmp_LRflipped.nii.gz
    
    
    - Fisher Z Transform the correlation. For details see `3dcalc <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dcalc.html>`_::
        
        3dcalc
        -a VMHC_FWHM.nii.gz
        -expr 'log((a+1)/(1-a))/2'
        -prefix VMHC_FWHM_Z.nii.gz
    
        
    - Calculate the number of volumes (nvols) in the flipped rest_res_2symmstandard.nii.gz (tmp_LRflipped.nii.gz)::
        
        -Use Nibabel to do this
        
        
    - Compute the Z statistic map ::
        
        3dcalc
        -a VMHC_FWHM_Z.nii.gz
        -expr 'a*sqrt('${nvols}'-3)'
        -prefix VMHC_FWHM_Z_stat.nii.gz
    
    
    Workflow:
    
    .. image:: ../images/vmhc_graph.dot.png
        :width: 500 
    
    Workflow Detailed:
    
    .. image:: ../images/vmhc_detailed_graph.dot.png
        :width: 500 
    

    References
    ----------
    
    .. [1] Zuo, X.-N., Kelly, C., Di Martino, A., Mennes, M., Margulies, D. S., Bangaru, S., Grzadzinski, R., et al. (2010). Growing together and growing apart: regional and sex differences in the lifespan developmental trajectories of functional homotopy. The Journal of neuroscience : the official journal of the Society for Neuroscience, 30(45), 15034-43. doi:10.1523/JNEUROSCI.2612-10.2010


    Examples
    --------
    
    >>> vmhc_w = create_vmhc()
    >>> vmhc_w.inputs.inputspec.symmetric_brain = 'MNI152_T1_2mm_symmetric_brain.nii.gz'
    >>> vmhc_w.inputs.inputspec.symmetric_skull = 'MNI152_T1_2mm_symmetric.nii.gz'
    >>> vmhc_w.inputs.inputspec.twomm_brain_mask_dil = 'MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz'
    >>> vmhc_w.inputs.inputspec.config_file_twomm = 'T1_2_MNI152_2mm_symmetric.cnf'
    >>> vmhc_w.inputs.inputspec.standard_for_func= 'MNI152_T1_2mm.nii.gz'
    >>> vmhc_w.inputs.fwhm_input.fwhm = [4.5, 6]
    >>> vmhc_w.get_node('fwhm_input').iterables = ('fwhm', [4.5, 6])
    >>> vmhc_w.inputs.inputspec.rest_res = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/rest_res_filt.nii.gz')
    >>> vmhc_w.inputs.inputspec.reorient = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/anat/mprage_RPI.nii.gz')
    >>> vmhc_w.inputs.inputspec.brain = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/anat/mprage_brain.nii.gz')
    >>> vmhc_w.inputs.inputspec.example_func2highres_mat = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/reg/example_func2highres.mat')
    >>> vmhc_w.inputs.inputspec.rest_mask = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/rest_mask.nii.gz')
    >>> vmhc_w.run() # doctest: +SKIP

    """

    vmhc = pe.Workflow(name=name)

    inputNode = pe.Node(util.IdentityInterface(fields=[
        'rest_res', 'example_func2highres_mat', 'rest_mask',
        'standard_for_func', 'mean_functional', 'brain',
        'fnirt_nonlinear_warp', 'ants_symm_initial_xfm', 'ants_symm_rigid_xfm',
        'ants_symm_affine_xfm', 'ants_symm_warp_field'
    ]),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'rest_res_2symmstandard', 'VMHC_FWHM_img', 'VMHC_Z_FWHM_img',
        'VMHC_Z_stat_FWHM_img'
    ]),
                         name='outputspec')

    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input')

    if use_ants == False:
        # Apply nonlinear registration (func to standard)
        nonlinear_func_to_standard = pe.Node(interface=fsl.ApplyWarp(),
                                             name='nonlinear_func_to_standard')

    elif use_ants == True:
        # ANTS warp image etc.
        fsl_to_itk_vmhc = create_wf_c3d_fsl_to_itk(0, name='fsl_to_itk_vmhc')

        collect_transforms_vmhc = create_wf_collect_transforms(
            0, name='collect_transforms_vmhc')

        apply_ants_xfm_vmhc = create_wf_apply_ants_warp(
            0, name='apply_ants_xfm_vmhc', ants_threads=ants_threads)

        # this has to be 3 instead of default 0 because it is a 4D file
        apply_ants_xfm_vmhc.inputs.inputspec.input_image_type = 3

    # copy and L/R swap file
    copy_and_L_R_swap = pe.Node(interface=fsl.SwapDimensions(),
                                name='copy_and_L_R_swap')
    copy_and_L_R_swap.inputs.new_dims = ('-x', 'y', 'z')

    # calculate vmhc
    pearson_correlation = pe.Node(interface=preprocess.TCorrelate(),
                                  name='pearson_correlation')
    pearson_correlation.inputs.pearson = True
    pearson_correlation.inputs.polort = -1
    pearson_correlation.inputs.outputtype = 'NIFTI_GZ'

    try:
        z_trans = pe.Node(interface=preprocess.Calc(), name='z_trans')
        z_stat = pe.Node(interface=preprocess.Calc(), name='z_stat')
    except AttributeError:
        from nipype.interfaces.afni import utils as afni_utils
        z_trans = pe.Node(interface=afni_utils.Calc(), name='z_trans')
        z_stat = pe.Node(interface=afni_utils.Calc(), name='z_stat')

    z_trans.inputs.expr = 'log((1+a)/(1-a))/2'
    z_trans.inputs.outputtype = 'NIFTI_GZ'
    z_stat.inputs.outputtype = 'NIFTI_GZ'

    NVOLS = pe.Node(util.Function(input_names=['in_files'],
                                  output_names=['nvols'],
                                  function=get_img_nvols),
                    name='NVOLS')

    generateEXP = pe.Node(util.Function(input_names=['nvols'],
                                        output_names=['expr'],
                                        function=get_operand_expression),
                          name='generateEXP')

    smooth = pe.Node(interface=fsl.MultiImageMaths(), name='smooth')

    if use_ants == False:
        vmhc.connect(inputNode, 'rest_res', smooth, 'in_file')
        vmhc.connect(inputnode_fwhm, ('fwhm', set_gauss), smooth, 'op_string')
        vmhc.connect(inputNode, 'rest_mask', smooth, 'operand_files')
        vmhc.connect(smooth, 'out_file', nonlinear_func_to_standard, 'in_file')
        vmhc.connect(inputNode, 'standard_for_func',
                     nonlinear_func_to_standard, 'ref_file')
        vmhc.connect(inputNode, 'fnirt_nonlinear_warp',
                     nonlinear_func_to_standard, 'field_file')
        ## func->anat matrix (bbreg)
        vmhc.connect(inputNode, 'example_func2highres_mat',
                     nonlinear_func_to_standard, 'premat')
        vmhc.connect(nonlinear_func_to_standard, 'out_file', copy_and_L_R_swap,
                     'in_file')
        vmhc.connect(nonlinear_func_to_standard, 'out_file',
                     pearson_correlation, 'xset')

    elif use_ants == True:
        # connections for ANTS stuff

        # functional apply warp stuff
        vmhc.connect(inputNode, 'rest_res', smooth, 'in_file')
        vmhc.connect(inputnode_fwhm, ('fwhm', set_gauss), smooth, 'op_string')
        vmhc.connect(inputNode, 'rest_mask', smooth, 'operand_files')

        vmhc.connect(smooth, 'out_file', apply_ants_xfm_vmhc,
                     'inputspec.input_image')

        vmhc.connect(inputNode, 'ants_symm_initial_xfm',
                     collect_transforms_vmhc, 'inputspec.linear_initial')

        vmhc.connect(inputNode, 'ants_symm_rigid_xfm', collect_transforms_vmhc,
                     'inputspec.linear_rigid')

        vmhc.connect(inputNode, 'ants_symm_affine_xfm',
                     collect_transforms_vmhc, 'inputspec.linear_affine')

        vmhc.connect(inputNode, 'ants_symm_warp_field',
                     collect_transforms_vmhc, 'inputspec.warp_file')

        # func->anat matrix (bbreg)
        vmhc.connect(inputNode, 'example_func2highres_mat', fsl_to_itk_vmhc,
                     'inputspec.affine_file')

        vmhc.connect(inputNode, 'brain', fsl_to_itk_vmhc,
                     'inputspec.reference_file')

        vmhc.connect(inputNode, 'mean_functional', fsl_to_itk_vmhc,
                     'inputspec.source_file')

        vmhc.connect(fsl_to_itk_vmhc, 'outputspec.itk_transform',
                     collect_transforms_vmhc, 'inputspec.fsl_to_itk_affine')

        vmhc.connect(inputNode, 'standard_for_func', apply_ants_xfm_vmhc,
                     'inputspec.reference_image')

        vmhc.connect(collect_transforms_vmhc,
                     'outputspec.transformation_series', apply_ants_xfm_vmhc,
                     'inputspec.transforms')

        vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
                     copy_and_L_R_swap, 'in_file')

        vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
                     pearson_correlation, 'xset')

    vmhc.connect(copy_and_L_R_swap, 'out_file', pearson_correlation, 'yset')
    vmhc.connect(pearson_correlation, 'out_file', z_trans, 'in_file_a')
    vmhc.connect(copy_and_L_R_swap, 'out_file', NVOLS, 'in_files')
    vmhc.connect(NVOLS, 'nvols', generateEXP, 'nvols')
    vmhc.connect(z_trans, 'out_file', z_stat, 'in_file_a')
    vmhc.connect(generateEXP, 'expr', z_stat, 'expr')

    if use_ants == False:
        vmhc.connect(nonlinear_func_to_standard, 'out_file', outputNode,
                     'rest_res_2symmstandard')

    elif use_ants == True:
        # ANTS warp outputs to outputnode
        vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
                     outputNode, 'rest_res_2symmstandard')

    vmhc.connect(pearson_correlation, 'out_file', outputNode, 'VMHC_FWHM_img')
    vmhc.connect(z_trans, 'out_file', outputNode, 'VMHC_Z_FWHM_img')
    vmhc.connect(z_stat, 'out_file', outputNode, 'VMHC_Z_stat_FWHM_img')

    return vmhc
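The two 3dcalc expressions in the docstring above (Fisher Z transform and Z statistic) map directly onto NumPy; a small sketch to make them concrete, with illustrative values for the correlation map and volume count.

import numpy as np

r = np.array([0.1, 0.5, 0.9])   # example correlation values
nvols = 120                      # example number of volumes

# Fisher Z transform: log((1 + a) / (1 - a)) / 2, i.e. arctanh(r).
z = np.log((1 + r) / (1 - r)) / 2
assert np.allclose(z, np.arctanh(r))

# Z statistic: a * sqrt(nvols - 3).
z_stat = z * np.sqrt(nvols - 3)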
Example #7
    def preprocess_channels_pipeline(self, **name_maps):
        pipeline = self.new_pipeline(
            'preprocess_channels',
            name_maps=name_maps,
            desc=("Convert channel signals in complex coords to polar coords "
                  "and combine"))

        if (self.provided('header_image') or
                self.branch('reorient_to_std') or
                self.parameter('force_channel_flip') is not None):
            # Read channel files reorient them into standard space and then
            # write back to directory
            list_channels = pipeline.add(
                'list_channels',
                ListDir(),
                inputs={
                    'directory': ('channels', multi_nifti_gz_format)})

            if self.parameter('force_channel_flip') is not None:
                force_flip = pipeline.add(
                    'flip_dims',
                    fsl.SwapDimensions(
                        new_dims=tuple(self.parameter('force_channel_flip'))),
                    inputs={
                        'in_file': (list_channels, 'files')},
                    iterfield=['in_file'])
                geom_dest_file = (force_flip, 'out_file')
            else:
                geom_dest_file = (list_channels, 'files')

            if self.provided('header_image'):
                # If header image is provided stomp its geometry over the
                # acquired channels
                copy_geom = pipeline.add(
                    'qsm_copy_geometry',
                    fsl.CopyGeom(
                        output_type='NIFTI_GZ'),
                    inputs={
                        'in_file': ('header_image', nifti_gz_format),
                        'dest_file': geom_dest_file},
                    iterfield=(['dest_file']),
                    requirements=[fsl_req.v('5.0.8')])
                reorient_in_file = (copy_geom, 'out_file')
            else:
                reorient_in_file = geom_dest_file

            if self.branch('reorient_to_std'):
                reorient = pipeline.add(
                    'reorient_channel',
                    fsl.Reorient2Std(
                        output_type='NIFTI_GZ'),
                    inputs={
                        'in_file': reorient_in_file},
                    iterfield=['in_file'],
                    requirements=[fsl_req.v('5.0.8')])
                copy_to_dir_in_files = (reorient, 'out_file')
            else:
                copy_to_dir_in_files = reorient_in_file

            copy_to_dir = pipeline.add(
                'copy_to_dir',
                CopyToDir(),
                inputs={
                    'in_files': copy_to_dir_in_files,
                    'file_names': (list_channels, 'files')})
            to_polar_in_dir = (copy_to_dir, 'out_dir')
        else:
            to_polar_in_dir = ('channels', multi_nifti_gz_format)

        pipeline.add(
            'to_polar',
            ToPolarCoords(
                in_fname_re=self.parameter('channel_fname_regex'),
                real_label=self.parameter('channel_real_label'),
                imaginary_label=self.parameter('channel_imag_label')),
            inputs={
                'in_dir': to_polar_in_dir},
            outputs={
                'mag_channels': ('magnitudes_dir', multi_nifti_gz_format),
                'phase_channels': ('phases_dir', multi_nifti_gz_format)})

        return pipeline
Example #8
def legacy(
    bids_base,
    template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, str
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	negative_contrast_agent : bool, optional
		Whether the scan was acquired with a negative contrast agent given the imaging modality; if true the values will be inverted with respect to zero.
		This is commonly used for iron nano-particle Cerebral Blood Volume (CBV) measurements.
	n_procs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on hardware (but not on current load).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is default, since it works safely. Use others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
    )

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'file_name', 'events_name',
                                 'subject_session'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    #if structural_scan_types.any():
    #	get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(function=get_bids_scan, input_names=inspect.getargspec(get_bids_scan)[0], output_names=['scan_path','scan_type','task', 'nii_path', 'nii_name', 'file_name', 'events_name', 'subject_session']))
    #	get_s_scan.inputs.ignore_exception = True
    #	get_s_scan.inputs.data_selection = data_selection
    #	get_s_scan.inputs.bids_base = bids_base

    #	s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
    #	s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

    #	s_resize = pe.Node(interface=VoxelResize(), name="s_resize")

    #	s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
    #	s_BET.inputs.mask = True
    #	s_BET.inputs.frac = 0.3
    #	s_BET.inputs.robust = True

    #	ants_introduction = pe.Node(interface=legacy.antsIntroduction(), name='ants_introduction')
    #	ants_introduction.inputs.dimension = 3
    #	ants_introduction.inputs.reference_image = template
    #	#will need updating to `1`
    #	ants_introduction.inputs.bias_field_correction = True
    #	ants_introduction.inputs.transformation_model = 'GR'
    #	ants_introduction.inputs.max_iterations = [8,15,8]

    #	s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
    #	s_register, s_warp, f_warp = structural_registration(template)

    #	workflow_connections.extend([
    #		(get_s_scan, s_reg_biascorrect, [('nii_path', 'input_image')]),
    #		(s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
    #		(s_cutoff, s_BET, [('out_file', 'in_file')]),
    #		(s_biascorrect, s_mask, [('output_image', 'in_file')]),
    #		(s_BET, s_mask, [('mask_file', 'mask_file')]),
    #		])

    #	#TODO: incl. in func registration
    #	if autorotate:
    #		workflow_connections.extend([
    #			(s_mask, s_rotated, [('out_file', 'out_file')]),
    #			(s_rotated, s_register, [('out_file', 'moving_image')]),
    #			])
    #	else:
    #		workflow_connections.extend([
    #			(s_mask, s_register, [('out_file', 'moving_image')]),
    #			(s_register, s_warp, [('composite_transform', 'transforms')]),
    #			(get_s_scan, s_warp, [('nii_path', 'input_image')]),
    #			(s_warp, datasink, [('output_image', 'anat')]),
    #			])

    #	if autorotate:
    #		s_rotated = autorotate(template)

    #	workflow_connections.extend([
    #		(get_f_scan, get_s_scan, [('subject_session', 'selector')]),
    #		(get_s_scan, s_warp, [('nii_name','output_image')]),
    #		(get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
    #		])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")

    blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
    blur.inputs.fwhmxy = functional_blur_xy

    if functional_blur_xy and negative_contrast_agent:
        workflow_connections.extend([
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])

    elif functional_blur_xy:
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])

    elif negative_contrast_agent:
        workflow_connections.extend([
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (f_copysform2qform, invert, [(('out_file', fslmaths_invert_values),
                                          'op_string')]),
            (f_copysform2qform, invert, [('out_file', 'in_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(bids_base, 'preprocessing/crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
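# A minimal standalone sketch of the symlink-aware cleanup used above: shutil.rmtree
# refuses to delete a symlinked directory, so only its contents are removed. The helper
# name and the workdir path are placeholders, and the symlink is detected directly
# instead of matching the error message.
import os
import shutil

def remove_workdir(workdir):
    try:
        shutil.rmtree(workdir)
    except OSError:
        if os.path.islink(workdir):
            # Keep the symlinked top-level directory, delete only its contents.
            for entry in os.listdir(workdir):
                entry_path = os.path.join(workdir, entry)
                if os.path.isdir(entry_path) and not os.path.islink(entry_path):
                    shutil.rmtree(entry_path)
                else:
                    os.unlink(entry_path)
        else:
            raise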
Exemple #9
0
def create_bbregister_workflow(name="bbregister", contrast_type="t2"):

    # Define the workflow inputs
    inputnode = pe.Node(
        util.IdentityInterface(fields=["subject_id", "source_file"]),
        name="inputs")

    # Estimate the registration to Freesurfer conformed space
    func2anat = pe.MapNode(fs.BBRegister(contrast_type=contrast_type,
                                         init="fsl",
                                         epi_mask=True,
                                         registered_file=True,
                                         out_fsl_file=True),
                           iterfield=["source_file"],
                           name="func2anat")

    # Set up a node to grab the target from the subjects directory
    fssource = pe.Node(io.FreeSurferSource(subjects_dir=fs.Info.subjectsdir()),
                       name="fssource")
    # Always overwrite the grab; shouldn't cascade unless the underlying image changes
    fssource.overwrite = True

    # Convert the target to nifti
    convert = pe.Node(fs.MRIConvert(out_type="niigz"), name="convertbrain")

    # Swap dimensions so stuff looks nice in the report
    flipbrain = pe.Node(fsl.SwapDimensions(new_dims=("RL", "PA", "IS")),
                        name="flipbrain")

    flipfunc = pe.MapNode(fsl.SwapDimensions(new_dims=("RL", "PA", "IS")),
                          iterfield=["in_file"],
                          name="flipfunc")

    # Slice up the registration
    func2anatpng = pe.MapNode(fsl.Slicer(middle_slices=True,
                                         show_orientation=False,
                                         scaling=.6,
                                         label_slices=False),
                              iterfield=["in_file"],
                              name="func2anatpng")

    # Rename some files
    pngname = pe.MapNode(util.Rename(format_string="func2anat.png"),
                         iterfield=["in_file"],
                         name="pngname")

    costname = pe.MapNode(util.Rename(format_string="func2anat_cost.dat"),
                          iterfield=["in_file"],
                          name="costname")

    tkregname = pe.MapNode(util.Rename(format_string="func2anat_tkreg.dat"),
                           iterfield=["in_file"],
                           name="tkregname")

    flirtname = pe.MapNode(util.Rename(format_string="func2anat_flirt.mat"),
                           iterfield=["in_file"],
                           name="flirtname")

    # Merge the slicer png and cost file into a report list
    report = pe.Node(util.Merge(2, axis="hstack"), name="report")

    # Define the workflow outputs
    outputnode = pe.Node(
        util.IdentityInterface(fields=["tkreg_mat", "flirt_mat", "report"]),
        name="outputs")

    bbregister = pe.Workflow(name=name)

    # Connect the registration
    bbregister.connect([
        (inputnode, func2anat, [("subject_id", "subject_id"),
                                ("source_file", "source_file")]),
        (inputnode, fssource, [("subject_id", "subject_id")]),
        (func2anat, flipfunc, [("registered_file", "in_file")]),
        (flipfunc, func2anatpng, [("out_file", "in_file")]),
        (fssource, convert, [("brain", "in_file")]),
        (convert, flipbrain, [("out_file", "in_file")]),
        (flipbrain, func2anatpng, [("out_file", "image_edges")]),
        (func2anatpng, pngname, [("out_file", "in_file")]),
        (func2anat, tkregname, [("out_reg_file", "in_file")]),
        (func2anat, flirtname, [("out_fsl_file", "in_file")]),
        (func2anat, costname, [("min_cost_file", "in_file")]),
        (costname, report, [("out_file", "in1")]),
        (pngname, report, [("out_file", "in2")]),
        (tkregname, outputnode, [("out_file", "tkreg_mat")]),
        (flirtname, outputnode, [("out_file", "flirt_mat")]),
        (report, outputnode, [("out", "report")]),
    ])

    return bbregister
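# A hedged usage sketch for the workflow above; the base directory, subject ID and
# functional file are placeholders, and SUBJECTS_DIR must point at a valid FreeSurfer
# subjects directory for fs.Info.subjectsdir() to resolve.
bbreg = create_bbregister_workflow(name="bbregister", contrast_type="t2")
bbreg.base_dir = "/tmp/bbreg_work"
bbreg.inputs.inputs.subject_id = "sub01"
# source_file feeds a MapNode, so it takes a list of functional images
bbreg.inputs.inputs.source_file = ["/data/sub01/func/mean_func.nii.gz"]
bbreg.run()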
Exemple #10
0
def create_vmhc(workflow,
                num_strat,
                strat,
                pipeline_config_object,
                func_key='functional_nuisance_residuals',
                output_name='vmhc'):
    """
    Compute the map of brain functional homotopy, the high degree of synchrony
    in spontaneous activity between geometrically corresponding interhemispheric (i.e., homotopic) regions.



    Parameters
    ----------

    workflow : workflow object
        Workflow to which the VMHC nodes are added
    num_strat : int
        Index of the current strategy, used to keep node names unique
    strat : strategy object
        Strategy holding the resource pool for the current pipeline branch
    pipeline_config_object : configuration object
        Pipeline configuration
    func_key : string
        Resource pool key of the functional image to use
    output_name : string
        Name under which the VMHC output is stored

    Returns
    -------

    workflow : workflow object
        The workflow with the VMHC nodes added

    strat : strategy object
        The updated strategy, with 'vmhc_raw_score' added to its resource pool



    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/vmhc/vmhc.py>`_ 

    Workflow Inputs::

        inputspec.brain : string (existing nifti file)
            Anatomical image (without skull)

        inputspec.symmetric_brain : string (existing nifti file)
            MNI152_T1_2mm_symmetric_brain.nii.gz

        inputspec.rest_res_filt : string (existing nifti file)
            Band-passed image with nuisance signals regressed out (and optionally scrubbed).
            Recommended bandpass filter: (0.001, 0.1)

        inputspec.reorient : string (existing nifti file)
            RPI oriented anatomical data

        inputspec.example_func2highres_mat : string (existing affine transformation .mat file)
            Specifies an affine transform that should be applied to the example_func before non linear warping

        inputspec.standard_for_func: string (existing nifti file)
            MNI152_T1_standard_resolution_brain.nii.gz

        inputspec.symmetric_skull : string (existing nifti file)
            MNI152_T1_2mm_symmetric.nii.gz

        inputspec.twomm_brain_mask_dil : string (existing nifti file)
            MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz

        inputspec.config_file_twomm_symmetric : string (existing .cnf file)
            T1_2_MNI152_2mm_symmetric.cnf

        inputspec.rest_mask : string (existing nifti file)
            A functional mask volume (derived by dilation from the motion-corrected functional volume)

        fwhm_input.fwhm : list (float)
            FWHM for spatially smoothing the Z-transformed correlations in MNI space.
            Generally 1.5 or 2 times the voxel size of the input image.

        inputspec.mean_functional : string (existing nifti file)
            The mean functional image for use in the func-to-anat registration matrix conversion
            to ITK (ANTS) format, if the user selects to use ANTS.

        
    Workflow Outputs::

        outputspec.highres2symmstandard : string (nifti file)
            Linear registration of T1 image to symmetric standard image

        outputspec.highres2symmstandard_mat : string (affine transformation .mat file)
            An affine transformation .mat file from linear registration and used in non linear registration

        outputspec.highres2symmstandard_warp : string (nifti file)
            warp file from Non Linear registration of T1 to symmetrical standard brain

        outputspec.fnirt_highres2symmstandard : string (nifti file)
            Non Linear registration of T1 to symmetrical standard brain

        outputspec.highres2symmstandard_jac : string (nifti file)
            jacobian determinant image from Non Linear registration of T1 to symmetrical standard brain

        outputspec.rest_res_2symmstandard : string (nifti file)
            nonlinear registration (func to standard) image

        outputspec.VMHC_FWHM_img : string (nifti file)
            pearson correlation between res2standard and flipped res2standard

        outputspec.VMHC_Z_FWHM_img : string (nifti file)
            Fisher Z transform map

        outputspec.VMHC_Z_stat_FWHM_img : string (nifti file)
            Z statistic map

    Order of commands:

    - Perform linear registration of Anatomical brain in T1 space to symmetric standard space. For details 
    see `flirt <http://www.fmrib.ox.ac.uk/fsl/flirt/index.html>`_::

        flirt
        -ref MNI152_T1_2mm_symmetric_brain.nii.gz
        -in mprage_brain.nii.gz
        -out highres2symmstandard.nii.gz
        -omat highres2symmstandard.mat
        -cost corratio
        -searchcost corratio
        -dof 12
        -interp trilinear    
        
    - Perform nonlinear registration (higres to standard) to symmetric standard brain. For details 
    see `fnirt <http://fsl.fmrib.ox.ac.uk/fsl/fnirt/>`_::
    
        fnirt
        --in=head.nii.gz
        --aff=highres2symmstandard.mat
        --cout=highres2symmstandard_warp.nii.gz
        --iout=fnirt_highres2symmstandard.nii.gz
        --jout=highres2symmstandard_jac.nii.gz
        --config=T1_2_MNI152_2mm_symmetric.cnf
        --ref=MNI152_T1_2mm_symmetric.nii.gz
        --refmask=MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz
        --warpres=10,10,10 

    - Perform spatial smoothing on the input functional image(inputspec.rest_res_filt).  For details 
    see `PrinciplesSmoothing <http://imaging.mrc-cbu.cam.ac.uk/imaging/PrinciplesSmoothing>`_ 
    `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm>`_::

        fslmaths rest_res_filt.nii.gz
        -kernel gauss FWHM/sqrt(8*ln(2))
        -fmean -mas rest_mask.nii.gz
        rest_res_filt_FWHM.nii.gz
        
    - Apply nonlinear registration (func to standard). For details see  
    `applywarp <http://www.fmrib.ox.ac.uk/fsl/fnirt/warp_utils.html#applywarp>`_::
        
        applywarp
        --ref=MNI152_T1_2mm_symmetric.nii.gz
        --in=rest_res_filt_FWHM.nii.gz
        --out=rest_res_2symmstandard.nii.gz
        --warp=highres2symmstandard_warp.nii.gz
        --premat=example_func2highres.mat
        
        
    - Copy and L/R swap the output of applywarp command (rest_res_2symmstandard.nii.gz). For details 
    see  `fslswapdim <http://fsl.fmrib.ox.ac.uk/fsl/fsl4.0/avwutils/index.html>`_::

        fslswapdim
        rest_res_2symmstandard.nii.gz
        -x y z
        tmp_LRflipped.nii.gz


    - Calculate pearson correlation between rest_res_2symmstandard.nii.gz and flipped 
    rest_res_2symmstandard.nii.gz(tmp_LRflipped.nii.gz). For details see  
    `3dTcorrelate <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dTcorrelate.html>`_::
        
        3dTcorrelate
        -pearson
        -polort -1
        -prefix VMHC_FWHM.nii.gz
        rest_res_2symmstandard.nii.gz
        tmp_LRflipped.nii.gz
    
    Workflow:
    
    .. image:: ../images/vmhc_graph.dot.png
        :width: 500 
    
    Workflow Detailed:
    
    .. image:: ../images/vmhc_detailed_graph.dot.png
        :width: 500 
    

    References
    ----------
    
    .. [1] Zuo, X.-N., Kelly, C., Di Martino, A., Mennes, M., Margulies, D. S., Bangaru, S., 
           Grzadzinski, R., et al. (2010). Growing together and growing apart: regional and 
           sex differences in the lifespan developmental trajectories of functional homotopy. 
           The Journal of neuroscience : the official journal of the Society for Neuroscience, 
           30(45), 15034-43. doi:10.1523/JNEUROSCI.2612-10.2010


    Examples
    --------
    
    >>> vmhc_w = create_vmhc()
    >>> vmhc_w.inputs.inputspec.symmetric_brain = 'MNI152_T1_2mm_symmetric_brain.nii.gz'
    >>> vmhc_w.inputs.inputspec.symmetric_skull = 'MNI152_T1_2mm_symmetric.nii.gz'
    >>> vmhc_w.inputs.inputspec.twomm_brain_mask_dil = 'MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz'
    >>> vmhc_w.inputs.inputspec.config_file_twomm_symmetric = 'T1_2_MNI152_2mm_symmetric.cnf'
    >>> vmhc_w.inputs.inputspec.standard_for_func = 'MNI152_T1_2mm.nii.gz'
    >>> vmhc_w.inputs.fwhm_input.fwhm = [4.5, 6]
    >>> vmhc_w.get_node('fwhm_input').iterables = ('fwhm', [4.5, 6])
    >>> vmhc_w.inputs.inputspec.rest_res_filt = os.path.abspath('/home/data/s1001/rest_res_filt.nii.gz')
    >>> vmhc_w.inputs.inputspec.reorient = os.path.abspath('/home/data/s1001/anat/mprage_RPI.nii.gz')
    >>> vmhc_w.inputs.inputspec.brain = os.path.abspath('/home/data/s1001/anat/mprage_brain.nii.gz')
    >>> vmhc_w.inputs.inputspec.example_func2highres_mat = os.path.abspath('/home/data/s1001/func2highres.mat')
    >>> vmhc_w.inputs.inputspec.rest_mask = os.path.abspath('/home/data/s1001/func/original/rest_mask.nii.gz')
    >>> vmhc_w.run() # doctest: +SKIP

    """

    nodes = strat.get_nodes_names()

    if not isinstance(func_key, str):
        raise ValueError('func_key should be a string, not a {0}'.format(
            type(func_key)))

    # we begin by smoothing the input file, which should be the current leaf node
    smooth_key = '{0}_smooth'.format(func_key)
    if smooth_key not in strat:
        spatial_smooth(workflow,
                       'leaf',
                       'functional_brain_mask',
                       smooth_key,
                       strat,
                       num_strat,
                       pipeline_config_object,
                       input_image_type='func_4d')

    # next write it to symmetric MNI space
    func_symm_mni_key = 'func_preproc_symm_mni'
    if func_symm_mni_key not in strat:
        output_func_to_standard(workflow,
                                smooth_key,
                                'template_skull_for_func_preproc',
                                func_symm_mni_key,
                                strat,
                                num_strat,
                                pipeline_config_object,
                                input_image_type='func_4d')

    # write out a swapped version of the file
    # copy and L/R swap file
    copy_and_L_R_swap = pe.Node(interface=fsl.SwapDimensions(),
                                name='copy_and_L_R_swap_{0}'.format(num_strat))

    copy_and_L_R_swap.inputs.new_dims = ('-x', 'y', 'z')

    func_node, func_file = strat[func_symm_mni_key]
    workflow.connect(func_node, func_file, copy_and_L_R_swap, 'in_file')

    # calculate correlation between original and swapped images
    pearson_correlation = pe.Node(
        interface=preprocess.TCorrelate(),
        name='pearson_correlation_{0}'.format(num_strat))

    pearson_correlation.inputs.pearson = True
    pearson_correlation.inputs.polort = -1
    pearson_correlation.inputs.outputtype = 'NIFTI_GZ'

    workflow.connect(func_node, func_file, pearson_correlation, 'xset')

    workflow.connect(copy_and_L_R_swap, 'out_file', pearson_correlation,
                     'yset')

    # add the outputs to the resource pool
    strat.update_resource_pool(
        {'vmhc_raw_score': (pearson_correlation, 'out_file')})

    strat.append_name(copy_and_L_R_swap.name)
    strat.append_name(pearson_correlation.name)

    return workflow, strat
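# Outside of a strategy object, the core of the VMHC computation above reduces to an
# L/R flip followed by a voxel-wise Pearson correlation. A minimal sketch using the
# interfaces directly; the input file name is a placeholder and is assumed to already
# be in symmetric MNI space.
from nipype.interfaces import fsl
from nipype.interfaces.afni import preprocess

func_symm = 'rest_res_2symmstandard.nii.gz'

# copy and L/R swap the functional image
swap = fsl.SwapDimensions(in_file=func_symm,
                          new_dims=('-x', 'y', 'z'),
                          out_file='tmp_LRflipped.nii.gz')
flipped = swap.run().outputs.out_file

# voxel-wise Pearson correlation between the original and the flipped image
tcorr = preprocess.TCorrelate(xset=func_symm,
                              yset=flipped,
                              pearson=True,
                              polort=-1,
                              out_file='VMHC_raw.nii.gz',
                              outputtype='NIFTI_GZ')
tcorr.run()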
Exemple #11
0
def segment_fslFAST(population, afs_dir, workspace_dir):
    count= 0
    #subject = population[subject_index]
    for subject in population:
        count +=1
        print '========================================================================================'
        print '%s- Runnning FSL FAST Segmentation on subject %s_%s' %(count, subject, workspace_dir[-1])
        print ''

        # define subject directory and anatomical file path
        afs_anatomical = os.path.join(afs_dir, 'probands', subject, 'NIFTI', 'MP2RAGE_BRAIN.nii')

        # define destination directory for fsl segmentation outputs
        try:
            os.makedirs(os.path.join(workspace_dir, subject, 'segmentation_fslFAST'))
        except OSError:
            out_dir  = str(os.path.join(workspace_dir, subject, 'segmentation_fslFAST'))
        out_dir  = str(os.path.join(workspace_dir, subject, 'segmentation_fslFAST'))

        '============================================================================================'
        '                       Converting dims of spectre deskulled mp2rage to spm space	     	 '
        '============================================================================================'

        # fslFAST needs a skullstripped brain.
        # BET fails with mp2rage.
        # The skullstripped mp2rage must be in the same orientation as the spm mp2rage input.
        # swapdim alone fails to convert to AIL.
        # Solution: flip to radiological, swapdim to AIL, then switch back to neurological.

        # flipping to radiological
        #print 'Forcing radiological..............................'
        shutil.copy(afs_anatomical, out_dir)
        local_anatomical = os.path.join(out_dir, 'MP2RAGE_BRAIN.nii')
        force_radiological = ['fslorient', '-forceradiological', '%s'%local_anatomical]
        subprocess.call(force_radiological)

        # swap dims
        if os.path.isfile(os.path.join(out_dir, 'MP2RAGE_BRAIN_swapdim.nii.gz')):
            print 'Dimensions already swapped .......................... moving on'
        else:
            print 'Swapping dimensions of the deskulled mp2rage to AIL....'

            swapdim_t1 = fsl.SwapDimensions()
            swapdim_t1.inputs.in_file     = local_anatomical
            swapdim_t1.inputs.new_dims    = ('AP', 'IS', 'LR')
            swapdim_t1.inputs.out_file    = '%s/MP2RAGE_BRAIN_swapdim.nii.gz' %out_dir
            swapdim_t1.inputs.output_type = 'NIFTI_GZ'
            swapdim_t1.run()

        # flipping back to neurological ..............................
        shutil.copy(afs_anatomical, out_dir)
        anatomical_swap2   = '%s/MP2RAGE_BRAIN_swapdim.nii.gz' %out_dir
        force_neurological = ['fslorient', '-forceneurological', '%s'%anatomical_swap2]
        subprocess.call(force_neurological)


        '============================================================================================'
        '                                     Registration                                           '
        '============================================================================================'

        if os.path.isfile(os.path.join(out_dir, 'MP2RAGE_BRAIN_2spm.nii')):
            print 'MP2RAGE_BRAIN to SPM affine already calculated....... moving on'
        else:
            print 'Running FSL2SPM affine registration..................'
            swapdim_t1     = os.path.join(out_dir, 'MP2RAGE_BRAIN_swapdim.nii.gz')
            freesurf_anat  = os.path.join(workspace_dir, subject, 'segmentation_freesurfer', 'freesurfer_T1_2spm.nii')

            #register freesurfer T1  to SPM space
            anat_flirt = fsl.FLIRT()
            anat_flirt.inputs.in_file         = swapdim_t1
            anat_flirt.inputs.reference       = freesurf_anat
            anat_flirt.inputs.output_type     = "NIFTI"
            anat_flirt.inputs.bins            = 256
            anat_flirt.inputs.cost            = 'mutualinfo'
            anat_flirt.inputs.interp          = 'nearestneighbour'
            anat_flirt.inputs.searchr_x       = [-90, 90]
            anat_flirt.inputs.searchr_y       = [-90, 90]
            anat_flirt.inputs.searchr_z       = [-90, 90]
            #anat_flirt.inputs.dof            = 6
            anat_flirt.inputs.out_file        = '%s/MP2RAGE_BRAIN_2spm.nii' %out_dir
            anat_flirt.inputs.out_matrix_file = '%s/MP2RAGE_BRAIN_2spm.mat' %out_dir
            anat_flirt.run()

        '============================================================================================'
        '                                     Segmentation                                           '
        '============================================================================================'
        # check if the file exists
        if os.path.isfile(os.path.join( os.path.join(workspace_dir, subject, 'segmentation_fslFAST', 'fslFAST_seg_2.nii.gz'))):
            print 'Brain already segmented......... moving on'

        else:

            # define destination directory for spm segmentation outputs
            try:
                os.makedirs(os.path.join(workspace_dir, subject, 'segmentation_fslFAST'))
            except OSError:
                out_dir  = str(os.path.join(workspace_dir, subject, 'segmentation_fslFAST'))
            out_dir  = str(os.path.join(workspace_dir, subject, 'segmentation_fslFAST'))

            # run FSL FAST  segmentation
            print '..... Running FSL FAST Segmentation '
            os.chdir(out_dir)
            seg                             = fsl.FAST()
            seg.inputs.in_files             = '%s/MP2RAGE_BRAIN_2spm.nii' %out_dir
            seg.inputs.out_basename         = 'fslFAST'
            seg.inputs.segments             = True
            seg.inputs.probability_maps     = True
            seg.run()

        print '========================================================================================'
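# The orientation workaround above (force radiological, swapdim to AIL, force
# neurological) can be factored into a small helper. A sketch assuming the FSL
# command line tools are on the PATH; file names are placeholders.
import subprocess
from nipype.interfaces import fsl

def reorient_to_ail(in_file, out_file):
    # force the radiological convention so fslswapdim behaves predictably
    subprocess.call(['fslorient', '-forceradiological', in_file])
    # swap dimensions to the AP/IS/LR axis order expected by the SPM reference
    fsl.SwapDimensions(in_file=in_file,
                       new_dims=('AP', 'IS', 'LR'),
                       out_file=out_file,
                       output_type='NIFTI_GZ').run()
    # switch the swapped image back to the neurological convention
    subprocess.call(['fslorient', '-forceneurological', out_file])
    return out_file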
Exemple #12
0
dog_preproc_wf.connect(image_dims, ('vox_size_x', fmt_string),
                       resample_axial_mask_isotropic, 'args')
dog_preproc_wf.connect(crop_axial_mask_image, 'roi_file',
                       resample_axial_mask_isotropic, 'in_file')
dog_preproc_wf.connect(crop_axial_mask_image, 'roi_file',
                       resample_axial_mask_isotropic, 'reference')

## so further complicating things... I also have to crop out the non-brain stuff..

## need to add the n4 bias node... at some point
#dog_preproc_wf.connect(axial_n4bias_node,'output_image',resample_axial_isotropic,'in_file')
#dog_preproc_wf.connect(axial_n4bias_node,'output_image',resample_axial_isotropic,'reference')

## first need to get it into roughly the same orientation as the axial image..
swap_sagittal = pe.Node(interface=fsl.SwapDimensions(), name='swap_sagittal')
swap_sagittal.inputs.new_dims = ('z', 'x', 'y')
swap_sagittal.inputs.output_type = 'NIFTI_GZ'
dog_preproc_wf.connect(dogscan_datasource, 'sag_t2', swap_sagittal, 'in_file')

### next step is to use the axial mask I generated and transform it to the sagittal image
## so I can crop the images and work with a smaller FOV.. also get better reg

## will debate whether I register the original image or work with a cropped image... may not matter

reg_input_sagittal_to_axial = pe.Node(interface=fsl.FLIRT(),
                                      name='reg_input_sagittal_to_axial')
reg_input_sagittal_to_axial.inputs.dof = 6
reg_input_sagittal_to_axial.inputs.searchr_x = [-10, 10]
reg_input_sagittal_to_axial.inputs.searchr_y = [-10, 10]
reg_input_sagittal_to_axial.inputs.searchr_z = [-10, 10]
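# The snippet ends before the registration node is wired into the workflow.
# A hypothetical continuation, assuming the reoriented sagittal image is registered
# to the cropped axial image produced earlier in the pipeline:
dog_preproc_wf.connect(swap_sagittal, 'out_file',
                       reg_input_sagittal_to_axial, 'in_file')
dog_preproc_wf.connect(crop_axial_mask_image, 'roi_file',
                       reg_input_sagittal_to_axial, 'reference')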
Exemple #13
0
def freesufer_create_tissues(population, workspace_dir, freesurfer_dir):

    #1. get aseg files
    #2. convert asegs to niftis
    #3. swaps dims to SPM orientation
    #4. register freesurfer files to spm space
    #5. create tissue masks from labels

    #subject = population[subject_index]
    count = 0
    for subject in population:
        count += 1
        print '========================================================================================'
        print '%s- Grabbing FREESURFER reconall for subject %s_%s' % (
            count, subject, workspace_dir[-1])

        subject_dir = os.path.join(workspace_dir, subject)
        anatomical_dir = os.path.join(subject_dir, 'anatomical_original')
        anatomical_file = os.path.join(anatomical_dir, 'ANATOMICAL.nii')

        # check if the file exists
        if os.path.isfile(anatomical_file):
            print '..'
            if os.path.isfile(
                    os.path.join(freesurfer_dir, subject, 'mri', 'aseg.mgz')):
                print 'Brain already segmented .......................... moving on'
                #print 'check data here ---> %s' %(os.path.join(freesurfer_dir, subject))
            else:
                print 'Run reconall for GTS_control_%s and then come back to me' % subject
        else:
            print 'anatomical file for subject %s not found' % subject

        '============================================================================================'
        '                           Convert Freesurfer MGZs to Nifti'
        '============================================================================================'

        seg_dir = os.path.join(freesurfer_dir, subject)
        t1_mgz = os.path.join(seg_dir, 'mri',
                              'T1.mgz')  # T1 image in freesurfer orientation
        aseg_mgz = os.path.join(seg_dir, 'mri',
                                'aseg.mgz')  # freesurfer segmentation file

        if os.path.isfile(
                os.path.join(workspace_dir, subject, 'segmentation_freesurfer',
                             'aseg.nii')):
            print 'MGZs already converted to NIFTI .................. moving on'
        else:
            print 'converting Freesurfer MGZ files to NIFTI......'

            try:
                os.makedirs(
                    os.path.join(workspace_dir, subject,
                                 'segmentation_freesurfer'))
            except OSError:
                out_fs_dir = str(
                    os.path.join(workspace_dir, subject,
                                 'segmentation_freesurfer'))
            out_fs_dir = str(
                os.path.join(workspace_dir, subject,
                             'segmentation_freesurfer'))

            #convert T1 to nifti
            anat2nii = fs.MRIConvert()
            anat2nii.inputs.in_file = t1_mgz
            anat2nii.inputs.out_file = '%s/T1.nii' % out_fs_dir
            anat2nii.inputs.out_type = 'nii'
            anat2nii.run()
            #convert seg to nifti
            seg2nii = fs.MRIConvert()
            seg2nii.inputs.in_file = aseg_mgz
            seg2nii.inputs.out_file = '%s/aseg.nii' % out_fs_dir
            seg2nii.inputs.out_type = 'nii'
            seg2nii.run()

        '============================================================================================'
        '                           SWAP dims to SPM orientation -- AP IS LR -- 					 '
        '============================================================================================'
        out_fs_dir = str(
            os.path.join(workspace_dir, subject, 'segmentation_freesurfer'))

        if os.path.isfile(os.path.join(out_fs_dir, 'aseg_swapdim.nii.gz')):
            print 'Dimensions already swapped ....................... moving on'
        else:
            print 'Swapping dimensions of freesurfer files to AIL'
            fs_t1_nii = os.path.join(out_fs_dir, 'T1.nii')
            fs_aseg_nii = os.path.join(out_fs_dir, 'aseg.nii')

            swapdim_t1 = fsl.SwapDimensions()
            swapdim_t1.inputs.in_file = fs_t1_nii
            swapdim_t1.inputs.new_dims = ('AP', 'IS', 'LR')
            swapdim_t1.inputs.out_file = '%s/T1_swapdim.nii.gz' % out_fs_dir
            swapdim_t1.inputs.output_type = 'NIFTI_GZ'
            swapdim_t1.run()

            swapdim_aseg = fsl.SwapDimensions()
            swapdim_aseg.inputs.in_file = fs_aseg_nii
            swapdim_aseg.inputs.new_dims = ('AP', 'IS', 'LR')
            swapdim_aseg.inputs.out_file = '%s/aseg_swapdim.nii.gz' % out_fs_dir
            swapdim_aseg.inputs.output_type = 'NIFTI_GZ'
            swapdim_aseg.run()

        '============================================================================================'
        '                                     Registration                                           '
        '============================================================================================'

        if os.path.isfile(
                os.path.join(out_fs_dir, 'freesurfer_aseg_2spm.nii.gz')):
            print 'Freesurfer to SPM affine already calculated....... moving on'
        else:
            print 'Registering freesurfer T1 to SPM space ..........'
            anat_spm = os.path.join(workspace_dir, subject,
                                    'anatomical_original', 'ANATOMICAL.nii')
            fs_t1_swapdim = os.path.join(out_fs_dir, 'T1_swapdim.nii.gz')
            fs_aseg_swapdim = os.path.join(out_fs_dir, 'aseg_swapdim.nii.gz')

            #register freesurfer T1  to SPM space
            anat_flirt = fsl.FLIRT()
            anat_flirt.inputs.in_file = fs_t1_swapdim
            anat_flirt.inputs.reference = anat_spm
            anat_flirt.inputs.output_type = "NIFTI"
            anat_flirt.inputs.bins = 256
            anat_flirt.inputs.cost = 'mutualinfo'
            anat_flirt.inputs.searchr_x = [-90, 90]
            anat_flirt.inputs.searchr_y = [-90, 90]
            anat_flirt.inputs.searchr_z = [-90, 90]
            #anat_flirt.inputs.dof     		  = 6
            anat_flirt.inputs.interp = 'nearestneighbour'
            anat_flirt.inputs.out_file = '%s/freesurfer_T1_2spm.nii' % out_fs_dir
            anat_flirt.inputs.out_matrix_file = '%s/freesurfer_T1_2spm_xfm.mat' % out_fs_dir
            anat_flirt.run()

            # Apply fs2spm xfm to aseg file
            fs2spm_xfm = '%s/freesurfer_T1_2spm_xfm.mat' % out_fs_dir
            aseg_applyxfm = fsl.ApplyXfm()
            aseg_applyxfm.inputs.in_file = fs_aseg_swapdim
            aseg_applyxfm.inputs.reference = anat_spm
            aseg_applyxfm.inputs.interp = 'nearestneighbour'
            aseg_applyxfm.inputs.in_matrix_file = fs2spm_xfm
            aseg_applyxfm.inputs.apply_xfm = True
            aseg_applyxfm.inputs.out_file = '%s/freesurfer_aseg_2spm.nii.gz' % out_fs_dir
            aseg_applyxfm.run()

        '============================================================================================'
        '                                     Create Tissue Masks                                    '
        '============================================================================================'
        if os.path.isfile(os.path.join(out_fs_dir,
                                       'freesurfer_GM_mask.nii.gz')):
            print 'Tissues already extracted as nifti files.......... moving on'
        else:
            print 'Extracting Tissue classes from Labels and saving as a nifti file'
            aseg = str(
                os.path.join(workspace_dir, subject, 'segmentation_freesurfer',
                             'freesurfer_aseg_2spm.nii.gz'))
            aseg_data = nb.load(aseg).get_data()
            aseg_affine = nb.load(aseg).get_affine()

            wm_data = np.zeros(aseg_data.shape)
            gm_data = np.zeros(aseg_data.shape)
            csf_data = np.zeros(aseg_data.shape)

            for data, labels in zip([wm_data, gm_data, csf_data],
                                    [wm_labels, gm_labels, csf_labels]):
                for label in labels:
                    data[np.where(aseg_data == label)] = 1
            wm_img = nb.Nifti1Image(wm_data, aseg_affine)
            gm_img = nb.Nifti1Image(gm_data, aseg_affine)
            csf_img = nb.Nifti1Image(csf_data, aseg_affine)

            nb.save(wm_img, '%s/freesurfer_WM_mask.nii.gz' % out_fs_dir)
            nb.save(gm_img, '%s/freesurfer_GM_mask.nii.gz' % out_fs_dir)
            nb.save(csf_img, '%s/freesurfer_CSF_mask.nii.gz' % out_fs_dir)
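# The loops above rely on wm_labels, gm_labels and csf_labels being defined at module
# level. A sketch of such definitions using common FreeSurfer aseg label IDs; the exact
# sets are an assumption and should be checked against FreeSurferColorLUT.txt.
wm_labels = [2, 41,    # left/right cerebral white matter
             7, 46]    # left/right cerebellar white matter
gm_labels = [3, 42,    # left/right cerebral cortex
             8, 47,    # left/right cerebellar cortex
             10, 49,   # left/right thalamus
             11, 50,   # left/right caudate
             12, 51,   # left/right putamen
             13, 52,   # left/right pallidum
             17, 53,   # left/right hippocampus
             18, 54]   # left/right amygdala
csf_labels = [4, 43,   # left/right lateral ventricle
              5, 44,   # left/right inferior lateral ventricle
              14, 15,  # third and fourth ventricle
              24]      # CSF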