Example #1
def create_workflow_to_resample_baw_files(name="ResampleBAWOutputs"):
    """
    This function...
    :param name:
    :return:
    """
    workflow = Workflow(name)
    inputs_to_resample = ["t1_file", "t2_file", "hncma_file", "abc_file"]
    other_inputs = ["reference_file", "acpc_transform"]
    label_maps = ["hncma_file", "abc_file"]
    input_spec = Node(IdentityInterface(inputs_to_resample + other_inputs),
                      name="input_spec")
    output_spec = Node(IdentityInterface(inputs_to_resample),
                       name="output_spec")
    for input_name in inputs_to_resample:
        node = Node(BRAINSResample(), "Resample_{0}".format(input_name))
        node.inputs.pixelType = "short"
        node.inputs.inverseTransform = True
        node.inputs.outputVolume = input_name + ".nii.gz"
        # Label maps need nearest-neighbour interpolation to stay integer-valued
        if input_name in label_maps:
            node.inputs.interpolationMode = "NearestNeighbor"
        workflow.connect([
            (input_spec, node, [("reference_file", "referenceVolume"),
                                ("acpc_transform", "warpTransform"),
                                (input_name, "inputVolume")]),
            (node, output_spec, [("outputVolume", input_name)])
        ])
    return workflow
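A minimal usage sketch for the workflow above, assuming BRAINSResample is installed; the file names are hypothetical:

# Wire concrete files into the input_spec node, then run
resample_wf = create_workflow_to_resample_baw_files()
resample_wf.inputs.input_spec.t1_file = "t1.nii.gz"
resample_wf.inputs.input_spec.t2_file = "t2.nii.gz"
resample_wf.inputs.input_spec.hncma_file = "hncma.nii.gz"
resample_wf.inputs.input_spec.abc_file = "abc.nii.gz"
resample_wf.inputs.input_spec.reference_file = "reference.nii.gz"
resample_wf.inputs.input_spec.acpc_transform = "acpc_transform.h5"
resample_wf.run()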
Example #2
def make_workflow():

    # 'args', 'wmh_dir', and 'wf_temp' are module-level configuration
    flairs = [os.path.abspath(i) for i in glob.glob(args.flair)]
    weights = [os.path.abspath(i) for i in glob.glob(args.weights)]
    weights_source = Node(interface=IdentityInterface(fields=['weights']),
                          name='weights_source')
    weights_source.inputs.weights = weights

    data_source = Node(IdentityInterface(fields=['flairs']),
                       name='data_source')
    data_source.iterables = ('flairs', flairs)

    sink = Node(interface=DataSink(), name='sink')
    sink.inputs.base_directory = wmh_dir
    sink.inputs.substitutions = [
        ('_flairs_', ''),
        ('_FLAIR.nii.gz/', '/'),
    ]
    sink.inputs.regexp_substitutions = [
        (r'\.\..*\.\.', ''),
    ]

    test_wf = ibbmTum_wf.get_test_wf(row_st=192, cols_st=192, thres_mask=10)

    wmh = Workflow(name='wmh', base_dir=wf_temp)

    wmh.connect(weights_source, 'weights', test_wf, 'inputspec.weights')
    wmh.connect(data_source, 'flairs', test_wf, 'inputspec.flair')
    wmh.connect(test_wf, 'outputspec.wmh_mask', sink, '@pred')

    return wmh
Example #3
def crop_wf(usemodel):
    """Create a workflow to to crop the image. This workflow requires a
    model image (e.g. an MNI standard) and points on that image. The model
    is registered to the T1 image, and the points transformed into T1 space.
    The image is cut to a box containing all of the transformed points.
    All cuts are in voxel coordinates.

    :return: A :py:mod:`nipype` workflow

    Workflow inputs/outputs

    :param inputspec.T1: The T1 image to remove the neck from
    :param inputspec.model: The reference image to register to the T1 image
    :param inputspec.points: Points file (tsv file with x, y, z, and index (ignored),
                             representing the limits in model space
    :return: A :py:mod:`nipype` node

    """
    name = 'crop'
    wf = pe.Workflow(name)
    inputspec = pe.Node(IdentityInterface(['T1', 'model', 'points']), name='inputspec')
    cut = pe.Node(CutImage(neckonly=False), name='cut')
    outputspec = pe.Node(IdentityInterface(['cropped']), name='outputspec')
    if usemodel:
        trpoints = _tr_points_wf()
        wf.connect([(inputspec, trpoints, [('T1', 'inputspec.T1'),
                                           ('model', 'inputspec.model'),
                                           ('points', 'inputspec.points')]),
                    (trpoints, cut, [('outputspec.out_points', 'points_file')])])
    else:
        wf.connect([(inputspec, cut, [('points', 'points_file')])])
    wf.connect([(inputspec, cut, [('T1', 'in_file')]),
                (cut, outputspec, [('out_file', 'cropped')])])
    return wf
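A usage sketch for crop_wf, assuming CutImage and _tr_points_wf are importable from the surrounding package; the paths are hypothetical:

# Register the model to the T1, transform the points, and crop to their bounding box
crop = crop_wf(usemodel=True)
crop.base_dir = "/tmp/crop_work"
crop.inputs.inputspec.T1 = "t1.nii.gz"
crop.inputs.inputspec.model = "mni_template.nii.gz"
crop.inputs.inputspec.points = "limits.tsv"
crop.run()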
Example #4
def init_mpm_wf(me_params, mtsat_params):
    inputnode = Node(IdentityInterface(fields=['pdw_file', 't1w_file', 'mtw_file',
                                               'pdw_cal', 't1w_cal', 'mtw_cal']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['pd_map', 'r1_map', 'r2s_map', 'mtsat_map']),
                      name='outputnode')

    bet = Node(BET(mask=True, no_output=True), name='brain_mask')
    mpm = Node(MPMR2s(sequence=me_params, verbose=True), name='MPM_R2s')
    mtsat = Node(MTSat(sequence=mtsat_params, verbose=True), name='MPM_MTSat')

    wf = Workflow(name='Multi-Parametric-Mapping')
    wf.connect([(inputnode, bet, [('t1w_file', 'in_file')]),
                (inputnode, mpm, [('pdw_file', 'pdw_file'),
                                  ('t1w_file', 't1w_file'),
                                  ('mtw_file', 'mtw_file')]),
                (bet, mpm, [('mask_file', 'mask_file')]),
                (mpm, mtsat, [('s0_pdw', 'pdw_file'),
                              ('s0_t1w', 't1w_file'),
                              ('s0_mtw', 'mtw_file')]),
                (bet, mtsat, [('mask_file', 'mask_file')]),
                (mpm, outputnode, [('r2s_map', 'r2s_map')]),
                (mtsat, outputnode, [('s0_map', 'pd_map'),
                                     ('r1_map', 'r1_map'),
                                     ('delta_map', 'mtsat_map')])])
    return wf
Example #5
def neck_removal_wf(usemodel):
    """Create a workflow to to remove the neck. This workflow requires a
    model image (e.g. an MNI standard) and points on that image. The model
    is registered to the T1 image, and the points transformed into T1 space.
    The inferior most transformed point is used to determine the cutting
    plane, which is aligned with the voxel coordinates.

    :return: A :py:mod:`nipype` workflow

    Workflow inputs/outputs

    :param inputspec.T1: The T1 image to remove the neck from
    :param inputspec.model: The reference image to register to the T1 image
    :param inputspec.limits: Points in model roughly indicating the ideal cutting plane
    :return: A :py:mod:`nipype` node

    """
    name = 'neck_removal'
    wf = pe.Workflow(name)
    inputspec = pe.Node(IdentityInterface(['T1', 'model', 'limits']), name='inputspec')
    wpoints = pe.Node(Function(input_names=['limits'], output_names=['points'], function=writepoints), name='write_points')
    cut = pe.Node(CutImage(neckonly=True), name='cut')
    outputspec = pe.Node(IdentityInterface(['cropped']), name='outputspec')
    if usemodel:
        trpoints = _tr_points_wf()
        wf.connect([(inputspec, trpoints, [('T1', 'inputspec.T1'),
                                           ('model', 'inputspec.model')]),
                    (wpoints, trpoints, [('points', 'inputspec.points')]),
                    (trpoints, cut, [('outputspec.out_points', 'points_file')])])
    else:
        wf.connect([(wpoints, cut, [('points', 'points_file')])])
    wf.connect([(inputspec, wpoints, [('limits', 'limits')]),
                (inputspec, cut, [('T1', 'in_file')]),
                (cut, outputspec, [('out_file', 'cropped')])])
    return wf
Example #6
def init_b1_mcf(rf_pulse=None, scale=150):
    inputnode = Node(IdentityInterface(fields=['2db1map_file', 'ref_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['b1_plus', 'b1_pulse']),
                      name='outputnode')

    b1_b1 = Node(ExtractROI(t_min=0, t_size=1), name='b1_extract_b1')
    b1_filter = Node(Filter(filter_spec='Gauss,3.0'), name='b1_filter')
    b1_mag = Node(ExtractROI(t_min=1, t_size=1), name='b1_extract_mag')

    b1_reg = Node(FLIRT(out_file='b1mag_reg.nii.gz',
                        out_matrix_file='b1mag_reg.mat'),
                  name='b1_reg')
    b1_invert = Node(ConvertXFM(invert_xfm=True), name='b1_invert')
    b1_apply = Node(FLIRT(apply_xfm=True), name='b1_reg_apply')
    b1_scale = Node(ImageMaths(op_string='-div %f' % scale), name='b1_scale')

    wf = Workflow(name='b1_prep')
    wf.connect([(inputnode, b1_b1, [('2db1map_file', 'in_file')]),
                (inputnode, b1_mag, [('2db1map_file', 'in_file')]),
                (inputnode, b1_reg, [('ref_file', 'in_file')]),
                (inputnode, b1_apply, [('ref_file', 'reference')]),
                (b1_mag, b1_reg, [('roi_file', 'reference')]),
                (b1_reg, b1_invert, [('out_matrix_file', 'in_file')]),
                (b1_invert, b1_apply, [('out_file', 'in_matrix_file')]),
                (b1_b1, b1_filter, [('roi_file', 'in_file')]),
                (b1_filter, b1_apply, [('out_file', 'in_file')]),
                (b1_apply, b1_scale, [('out_file', 'in_file')]),
                (b1_scale, outputnode, [('out_file', 'b1_plus')])])
    if rf_pulse:
        b1_rf = Node(RFProfile(rf=rf_pulse, out_file='b1_rf.nii.gz'),
                     name='b1_rf')
        wf.connect([(b1_scale, b1_rf, [('out_file', 'in_file')]),
                    (b1_rf, outputnode, [('out_file', 'b1_pulse')])])
    return wf
Example #7
def create_ffx_workflow(name="mni_ffx",
                        space="mni",
                        contrasts=None,
                        exp_info=None):
    """Return a workflow object to execute a fixed-effects mode."""
    if contrasts is None:
        contrasts = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface([
        "copes", "varcopes", "masks", "means", "dofs", "ss_files", "anatomy",
        "reg_file", "timeseries"
    ]),
                     name="inputnode")

    # Fit the fixedfx model for each contrast
    ffxmodel = Node(FFXModel(contrasts=contrasts), "ffxmodel")

    # Calculate the fixed effects Rsquared maps
    ffxsummary = Node(FFXSummary(), "ffxsummary")

    # Plot the fixedfx results
    report = Node(FFXReport(space=space), "report")

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    outputnode = Node(
        IdentityInterface([
            "flame_results", "r2_files", "tsnr_file", "mean_file",
            "summary_report", "json_file", "zstat_report"
        ]), "outputs")

    ffx = Workflow(name=name)
    ffx.connect([
        (inputnode, ffxmodel, [("copes", "copes"), ("varcopes", "varcopes"),
                               ("dofs", "dofs"), ("masks", "masks"),
                               ("reg_file", "reg_file")]),
        (inputnode, ffxsummary, [("ss_files", "ss_files"), ("means", "means"),
                                 ("timeseries", "timeseries")]),
        (inputnode, report, [("anatomy", "anatomy"), ("masks", "masks")]),
        (inputnode, saveparams, [("timeseries", "in_file")]),
        (ffxmodel, report, [("zstat_files", "zstat_files")]),
        (ffxsummary, report, [("r2_files", "r2_files"),
                              ("tsnr_file", "tsnr_file")]),
        (ffxmodel, outputnode, [("flame_results", "flame_results")]),
        (ffxsummary, outputnode, [("r2_files", "r2_files"),
                                  ("tsnr_file", "tsnr_file"),
                                  ("mean_file", "mean_file")]),
        (report, outputnode, [("summary_files", "summary_report"),
                              ("zstat_files", "zstat_report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    return ffx, inputnode, outputnode
Example #8
def create_bbregister_workflow(name="bbregister",
                               contrast_type="t2",
                               partial_brain=False,
                               init_with="fsl"):
    """Find a linear transformation to align the EPI file with the anatomy."""
    in_fields = ["subject_id", "timeseries"]
    if partial_brain:
        in_fields.append("whole_brain_template")
    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Take the mean over time to get a target volume
    meanvol = MapNode(fsl.MeanImage(), "in_file", "meanvol")

    # Do a rough skullstrip using BET
    skullstrip = MapNode(fsl.BET(), "in_file", "bet")

    # Estimate the registration to Freesurfer conformed space
    func2anat = MapNode(
        fs.BBRegister(contrast_type=contrast_type,
                      init=init_with,
                      epi_mask=True,
                      registered_file=True,
                      out_reg_file="func2anat_tkreg.dat",
                      out_fsl_file="func2anat_flirt.mat"), "source_file",
        "func2anat")

    # Make an image for quality control on the registration
    report = MapNode(CoregReport(), "in_file", "coreg_report")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["tkreg_mat", "flirt_mat", "report"]),
                      "outputs")

    bbregister = Workflow(name=name)

    # Connect the registration
    bbregister.connect([
        (inputnode, func2anat, [("subject_id", "subject_id")]),
        (inputnode, report, [("subject_id", "subject_id")]),
        (inputnode, meanvol, [("timeseries", "in_file")]),
        (meanvol, skullstrip, [("out_file", "in_file")]),
        (skullstrip, func2anat, [("out_file", "source_file")]),
        (func2anat, report, [("registered_file", "in_file")]),
        (func2anat, outputnode, [("out_reg_file", "tkreg_mat")]),
        (func2anat, outputnode, [("out_fsl_file", "flirt_mat")]),
        (report, outputnode, [("out_file", "report")]),
    ])

    # Possibly connect the whole-brain template as an intermediate volume
    if partial_brain:
        bbregister.connect([
            (inputnode, func2anat, [("whole_brain_template",
                                     "intermediate_file")]),
        ])

    return bbregister
Example #9
def create_reg_workflow(name="reg",
                        space="mni",
                        regtype="model",
                        method="fsl",
                        residual=False,
                        cross_exp=False):
    """Flexibly register files into one of several common spaces."""

    # Define the input fields flexibly
    if regtype == "model":
        fields = ["copes", "varcopes", "sumsquares"]
    elif regtype == "timeseries":
        fields = ["timeseries"]

    if cross_exp:
        fields.extend(["first_rigid"])

    fields.extend(["means", "masks", "rigids"])

    if space == "mni":
        fields.extend(["affine", "warpfield"])
    else:
        fields.extend(["tkreg_rigid"])

    inputnode = Node(IdentityInterface(fields), "inputnode")

    # Grab the correct interface class dynamically
    interface_name = "{}{}Registration".format(space.upper(),
                                               regtype.capitalize())
    reg_interface = globals()[interface_name]
    transform = Node(reg_interface(method=method), "transform")

    # Sanity check on inputs
    if regtype == "model" and residual:
        raise ValueError("residual and regtype=model does not make sense")

    # Set the kind of timeseries
    if residual:
        transform.inputs.residual = True

    outputnode = Node(IdentityInterface(["out_files"]), "outputnode")

    # Define the workflow
    regflow = Workflow(name=name)

    # Connect the inputs programmatically
    for field in fields:
        regflow.connect(inputnode, field, transform, field)

    # The transform node only ever has one output
    regflow.connect(transform, "out_files", outputnode, "out_files")

    return regflow, inputnode, outputnode
Example #10
    def make_neuromet1_workflow(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = ('subject_id', self.subject_list)

        # unidensource, return for every subject uni and den
        unidensource = Node(interface=IdentityInterface(fields=['uniden']),
                            name="unidensource")
        unidensource.iterables = ('uniden', ['UNI', 'DEN'])

        info = dict(t1=[['subject_id', 'subject_id', 'uniden']])

        datasource = Node(interface=DataGrabber(
            infields=['subject_id', 'uniden'], outfields=['t1']),
                          name='datasource')
        datasource.inputs.base_directory = self.w_dir
        datasource.inputs.template = '{prefix}%s/{prefix}%s.%s_mp2rage_orig.nii.gz'.format(
            prefix=self.subject_prefix)  # the %s slots are filled by DataGrabber from template_args
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()

        segment = self.make_segment()

        mask = self.make_mask()

        neuromet = Workflow(name=self.subject_prefix, base_dir=self.temp_dir)
        neuromet.connect(infosource, 'subject_id', datasource, 'subject_id')
        neuromet.connect(unidensource, 'uniden', datasource, 'uniden')
        neuromet.connect(datasource, 't1', segment, 'ro.in_file')

        # neuromet.connect()
        neuromet.connect(segment, 'spm_tissues_split.gm', mask,
                         'sum_tissues1.in_file')
        neuromet.connect(segment, 'spm_tissues_split.wm', mask,
                         'sum_tissues1.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.csf', mask,
                         'sum_tissues2.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.gm', sink, '@gm')
        neuromet.connect(segment, 'spm_tissues_split.wm', sink, '@wm')
        neuromet.connect(segment, 'spm_tissues_split.csf', sink, '@csf')
        neuromet.connect(segment, 'seg.bias_corrected_images', sink,
                         '@biascorr')

        # neuromet.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink, '@img')
        neuromet.connect(mask, 'gen_mask.out_file', sink, '@mask')
        neuromet.connect(segment, 'ro.out_file', sink, '@ro')

        return neuromet
Example #11
def workflow_spec(name="{workflow_name}", exp_info=None):
    """Return a Nipype workflow for MR processing.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information
    """
    workflow = Workflow(name)

    if exp_info is None:
        exp_info = fitz.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = [""]  # "timeseries"]

    inputnode = Node(IdentityInterface(in_fields), "inputs")
    """
    # Define Actual Nipype Nodes, Workflows, etc.
    # e.g. The start of an example SPM preproc workflow
    # --------------------------------------------------

    slicetiming = pe.Node(interface=spm.SliceTiming(), name="slicetiming")
    slicetiming.inputs.ref_slice = 1
    realign = pe.Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True
    """
    # Template connections: passing a string to connect() would raise at
    # runtime, so the example wiring stays commented out until real nodes exist.
    # workflow.connect([
    #     (inputnode, slicetiming,
    #         [('timeseries', 'in_files')]),
    #     (slicetiming, realign,
    #         [('timecorrected_files', 'in_files')]),
    # ])

    output_fields = [""]  # realigned_files", "realignment_parameters"]

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    # workflow.connect([
    #     (realign, outputnode,
    #         [("realigned_files", "realigned_files"),
    #          ("realignment_parameters", "realignment_parameters")]),
    # ])

    # Return the workflow itself and input and output nodes.
    return workflow, inputnode, outputnode
Example #12
def create_machine_learning_workflow(name="CreateEdgeProbabilityMap",
                                     resample=True,
                                     plugin_args=None):
    """
    This function...
    :param name:
    :param resample:
    :param plugin_args:
    :return:
    """
    workflow = Workflow(name)
    input_spec = Node(IdentityInterface([
        "rho", "phi", "theta", "posteriors", "t1_file", "acpc_transform",
        "gm_classifier_file", "wm_classifier_file"
    ]),
                      name="input_spec")

    predict_edge_probability = Node(PredictEdgeProbability(),
                                    name="PredictEdgeProbability")
    if plugin_args:
        predict_edge_probability.plugin_args = plugin_args
    workflow.connect([(input_spec, predict_edge_probability,
                       [("t1_file", "t1_file"),
                        ("gm_classifier_file", "gm_classifier_file"),
                        ("wm_classifier_file", "wm_classifier_file")])])

    if resample:
        collect_features = Node(CollectFeatureFiles(),
                                name="CollectFeatureFiles")
        collect_features.inputs.inverse_transform = True
        workflow.connect([(input_spec, collect_features,
                           [("rho", "rho"), ("phi", "phi"), ("theta", "theta"),
                            ("posteriors", "posterior_files"),
                            ("t1_file", "reference_file"),
                            ("acpc_transform", "transform_file")])])

        workflow.connect([(collect_features, predict_edge_probability,
                           [("feature_files", "additional_files")])])
    else:
        print("workflow not yet created")
        # TODO: create workflow that does not resample the input images
        return

    output_spec = Node(IdentityInterface(
        ["gm_probability_map", "wm_probability_map"]),
                       name="output_spec")
    workflow.connect(predict_edge_probability, "gm_edge_probability",
                     output_spec, "gm_probability_map")
    workflow.connect(predict_edge_probability, "wm_edge_probability",
                     output_spec, "wm_probability_map")

    return workflow
Example #13
def create_slicetime_workflow(name="slicetime",
                              TR=2,
                              slice_order="up",
                              interleaved=False):

    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    if isinstance(interleaved, str) and interleaved.lower() == "siemens":

        sliceorder = MapNode(SiemensSliceOrder(), "in_file", "sliceorder")
        slicetimer_set_interleaved = False
        slicetimer_iterfields = ["in_file", "custom_order"]

    elif isinstance(interleaved, bool):

        sliceorder = None
        slicetimer_set_interleaved = interleaved
        slicetimer_iterfields = ["in_file"]

    else:

        raise ValueError("interleaved must be True, False, or 'siemens'")

    slicetimer = MapNode(fsl.SliceTimer(time_repetition=TR),
                         slicetimer_iterfields, "slicetime")

    if slicetimer_set_interleaved:
        slicetimer.inputs.interleaved = True

    if slice_order == "down":
        slicetimer.inputs.index_dir = True
    elif slice_order != "up":
        raise ValueError("slice_order must be 'up' or 'down'")

    outputnode = Node(IdentityInterface(["timeseries"]), "outputs")

    slicetime = Workflow(name)
    slicetime.connect([
        (inputnode, slicetimer, [("timeseries", "in_file")]),
        (slicetimer, outputnode, [("slice_time_corrected_file", "timeseries")]),
    ])

    if sliceorder is not None:
        slicetime.connect([
            (inputnode, sliceorder, [("timeseries", "in_file")]),
            (sliceorder, slicetimer, [("out_file", "custom_order")]),
        ])

    return slicetime
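A usage sketch assuming a Siemens interleaved acquisition and that SiemensSliceOrder is importable; the file name is hypothetical:

# SiemensSliceOrder generates the custom order file that slicetimer consumes
st = create_slicetime_workflow(TR=2.0, slice_order="up", interleaved="siemens")
st.inputs.inputs.timeseries = ["run1_bold.nii.gz"]  # the input node is named "inputs"
st.run()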
Example #14
def make_simple_workflow():

    wf = Workflow(name="test")

    node1 = Node(IdentityInterface(fields=["foo"]), name="node1")
    node2 = MapNode(IdentityInterface(fields=["foo"]),
                    name="node2",
                    iterfield=["foo"])
    node3 = Node(IdentityInterface(fields=["foo"]), name="node3")

    wf.connect([
        (node1, node2, [("foo", "foo")]),
        (node2, node3, [("foo", "foo")]),
    ])

    return wf, node1, node2, node3
Example #15
def _tr_points_wf():
    wf = pe.Workflow('trpointswf')
    inputspec = pe.Node(IdentityInterface(['T1', 'model', 'points']), 'inputspec')
    reg = pe.Node(ants_registration_affine_node(write_composite_transform=False), name='register')
    convertpoints = pe.Node(ConvertPoints(in_format='tsv', out_format='ants'), 'convert_points')
    trpoints = pe.Node(ApplyTransformsToPoints(dimension=3), name='transform_points')
    convertpoints2 = pe.Node(ConvertPoints(in_format='ants', out_format='tsv'), 'convert_points2')
    outputspec = pe.Node(IdentityInterface(['out_points']), 'outputspec')
    wf.connect([(inputspec, reg, [('T1', 'moving_image'),
                                  ('model', 'fixed_image')]),
                (inputspec, convertpoints, [('points', 'in_file')]),
                (reg, trpoints, [('forward_transforms', 'transforms')]),
                (convertpoints, trpoints, [('out_file', 'input_file')]),
                (trpoints, convertpoints2, [('output_file', 'in_file')]),
                (convertpoints2, outputspec, [('out_file', 'out_points')])])
    return wf
Example #16
def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(
        freesurfer.SampleToSurface(sampling_method=exp_info["sampling_method"],
                                   sampling_range=exp_info["sampling_range"],
                                   sampling_units=exp_info["sampling_units"],
                                   smooth_surf=exp_info["surf_smooth"],
                                   subject_id="fsaverage",
                                   mni152reg=True,
                                   target_subject="fsaverage"), "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(
        freesurfer.SampleToSurface(sampling_range=exp_info["sampling_range"],
                                   sampling_units=exp_info["sampling_units"],
                                   subject_id="fsaverage",
                                   mni152reg=True,
                                   target_subject="fsaverage"), "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat", "surf_mask"]),
                      "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj, [("zstat_file", "source_file")]),
        (inputnode, maskproj, [("mask_file", "source_file")]),
        (hemisource, zstatproj, [("mni_hemi", "hemi")]),
        (hemisource, maskproj, [("mni_hemi", "hemi")]),
        (zstatproj, outputnode, [("out_file", "surf_zstat")]),
        (maskproj, outputnode, [("out_file", "surf_mask")]),
    ])

    return proj
Example #17
def amide_noe(zfrqs, name='Amide_NOE'):
    inputnode = Node(IdentityInterface(fields=['zspec_file', 'mask_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['diff_file', 'DS', 'MT', 'Amide', 'NOE']),
                      name='outputnode')
    # Repeat 2-pool fit
    f0_indices = (np.abs(zfrqs) > 9.9) | (np.abs(zfrqs) < 1.1)
    sequence = {'MTSat': {'pulse': {'p1': 0.4,
                                    'p2': 0.3,
                                    'bandwidth': 0.39},
                          'Trf': 0.02,
                          'TR': 4,
                          'FA': 5,
                          'sat_f0': zfrqs[f0_indices].tolist(),
                          'sat_angle': np.repeat(180.0, f0_indices.sum()).tolist()}}  # one angle per selected offset
    backg_select = Node(Select(volumes=np.where(f0_indices)[0].tolist(), out_file='bg_zspec.nii.gz'),
                        name='backg_select')
Example #18
def init_complex_mcf(name='', ref=False, fix_ge=True, negate=True):
    inputnode = Node(
        IdentityInterface(fields=['real_file', 'imag_file', 'ref_file']),
        name='inputnode')
    outputnode = Node(
        IdentityInterface(fields=['x_file', 'ref_file', 'mask_file']),
        name='outputnode')

    ri = Node(Complex(fix_ge=fix_ge,
                      negate=negate,
                      magnitude_out_file=name + '_mag.nii.gz',
                      real_out_file=name + '_r.nii.gz',
                      imag_out_file=name + '_i.nii.gz'),
              name='ri_' + name)
    moco = Node(MCFLIRT(mean_vol=not ref, save_mats=True), name='moco_' + name)
    apply_r = Node(ApplyXfm4D(four_digit=True), name='apply_r' + name)
    apply_i = Node(ApplyXfm4D(four_digit=True), name='apply_i' + name)
    x = Node(Complex(complex_out_file=name + '_x.nii.gz'), name='x_' + name)
    f = Node(Filter(complex_in=True, complex_out=True, filter_spec='Tukey'),
             name='filter_' + name)

    wf = Workflow(name='prep_' + name)
    wf.connect([(inputnode, ri, [('real_file', 'real')]),
                (inputnode, ri, [('imag_file', 'imag')]),
                (ri, moco, [('magnitude_out_file', 'in_file')]),
                (ri, apply_r, [('real_out_file', 'in_file')]),
                (ri, apply_i, [('imag_out_file', 'in_file')]),
                (moco, apply_r, [('mat_dir', 'trans_dir')]),
                (moco, apply_i, [('mat_dir', 'trans_dir')]),
                (apply_r, x, [('out_file', 'real')]),
                (apply_i, x, [('out_file', 'imag')]),
                (x, f, [('complex_out_file', 'in_file')]),
                (f, outputnode, [('out_file', 'x_file')])])
    if not ref:
        mask = Node(BET(mask=True, no_output=True), name='mask')
        wf.connect([(moco, mask, [('mean_img', 'in_file')]),
                    (moco, apply_r, [('mean_img', 'ref_vol')]),
                    (moco, apply_i, [('mean_img', 'ref_vol')]),
                    (moco, outputnode, [('mean_img', 'ref_file')]),
                    (mask, outputnode, [('mask_file', 'mask_file')])])
    else:
        wf.connect([(inputnode, moco, [('ref_file', 'ref_file')]),
                    (inputnode, apply_r, [('ref_file', 'ref_vol')]),
                    (inputnode, apply_i, [('ref_file', 'ref_vol')])])

    return wf
Example #19
    def make_infosource(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")

        infosource.iterables = ('subject_id', self.subject_list)
        return infosource
Example #20
def create_identity_interface_node(inputs, name):
    """
    This function...
    :param inputs:
    :param name:
    :return:
    """
    return Node(IdentityInterface(inputs), name=name)
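For example, the helper can replace a hand-built input node; the field names here are arbitrary:

input_spec = create_identity_interface_node(["t1_file", "mask_file"], "input_spec")
input_spec.inputs.t1_file = "t1.nii.gz"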
Example #21
def create_filtering_workflow(name="filter",
                              hpf_cutoff=128,
                              TR=2,
                              output_name="timeseries"):
    """Scale and high-pass filter the timeseries."""
    inputnode = Node(IdentityInterface(["timeseries", "mask_file"]), "inputs")

    # Grand-median scale within the brain mask
    scale = MapNode(ScaleTimeseries(statistic="median", target=10000),
                    ["in_file", "mask_file"], "scale")

    # Gaussian running-line filter
    if hpf_cutoff is None:
        hpf_sigma = -1
    else:
        hpf_sigma = (hpf_cutoff / 2.0) / TR
    filt = MapNode(fsl.TemporalFilter(highpass_sigma=hpf_sigma), "in_file",
                   "filter")  # renamed so the builtin 'filter' is not shadowed

    # Possibly replace the mean
    # (In later versions of FSL, the highpass filter removes the
    # mean component. Put it back, but be flexible so this isn't
    # broken on older versions of FSL).
    replacemean = MapNode(ReplaceMean(output_name=output_name),
                          ["orig_file", "filtered_file"], "replacemean")

    # Compute a final mean functional volume
    meanfunc = MapNode(fsl.MeanImage(out_file="mean_func.nii.gz"), "in_file",
                       "meanfunc")

    outputnode = Node(IdentityInterface(["timeseries", "mean_file"]),
                      "outputs")

    filtering = Workflow(name)
    filtering.connect([
        (inputnode, scale, [("timeseries", "in_file"),
                            ("mask_file", "mask_file")]),
        (scale, filt, [("out_file", "in_file")]),
        (scale, replacemean, [("out_file", "orig_file")]),
        (filt, replacemean, [("out_file", "filtered_file")]),
        (replacemean, meanfunc, [("out_file", "in_file")]),
        (replacemean, outputnode, [("out_file", "timeseries")]),
        (meanfunc, outputnode, [("out_file", "mean_file")]),
    ])

    return filtering
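For reference, with the defaults (hpf_cutoff=128 s, TR=2 s) the highpass sigma works out to (128 / 2.0) / 2 = 32 volumes. A usage sketch with hypothetical file names:

filtering_wf = create_filtering_workflow()
filtering_wf.inputs.inputs.timeseries = ["run1_bold.nii.gz"]
filtering_wf.inputs.inputs.mask_file = ["brain_mask.nii.gz"]
filtering_wf.run()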
Example #22
def create_realignment_workflow(name="realignment"):
    """Motion and slice-time correct the timeseries and summarize."""
    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    # Get the middle volume of each run for motion correction
    extractref = MapNode(ExtractRealignmentTarget(), "in_file", "extractref")

    # Motion correct to middle volume of each run
    mcflirt = MapNode(
        fsl.MCFLIRT(cost="normcorr",
                    interpolation="spline",
                    save_mats=True,
                    save_rms=True,
                    save_plots=True), ["in_file", "ref_file"], "mcflirt")

    # Generate a report on the motion correction
    mcreport = MapNode(RealignmentReport(),
                       ["target_file", "realign_params", "displace_params"],
                       "mcreport")

    # Define the outputs
    outputnode = Node(
        IdentityInterface(
            ["timeseries", "example_func", "report", "motion_file"]),
        "outputs")

    # Define and connect the sub workflow
    realignment = Workflow(name)

    realignment.connect([
        (inputnode, extractref, [("timeseries", "in_file")]),
        (inputnode, mcflirt, [("timeseries", "in_file")]),
        (extractref, mcflirt, [("out_file", "ref_file")]),
        (extractref, mcreport, [("out_file", "target_file")]),
        (mcflirt, mcreport, [("par_file", "realign_params"),
                             ("rms_files", "displace_params")]),
        (mcflirt, outputnode, [("out_file", "timeseries")]),
        (extractref, outputnode, [("out_file", "example_func")]),
        (mcreport, outputnode, [("realign_report", "report"),
                                ("motion_file", "motion_file")]),
    ])

    return realignment
Example #23
def create_unwarp_workflow(name="unwarp", fieldmap_pe=("y", "y-")):
    """Unwarp functional timeseries using reverse phase-blipped images."""
    inputnode = Node(IdentityInterface(["timeseries", "fieldmap"]), "inputs")

    # Calculate the shift field
    # Note that setting readout_times to 1 will give a fine
    # map of the field, but the units will be off
    # Since we don't write out the map of the field itself, it does
    # not seem worth it to add another parameter for the readout times.
    # (It does require that they are the same, but when wouldn't they be?)
    topup = MapNode(
        fsl.TOPUP(encoding_direction=fieldmap_pe,
                  readout_times=[1] * len(fieldmap_pe)), ["in_file"], "topup")

    # Unwarp the timeseries
    applytopup = MapNode(fsl.ApplyTOPUP(method="jac", in_index=[
        1
    ]), ["in_files", "in_topup_fieldcoef", "in_topup_movpar", "encoding_file"],
                         "applytopup")

    # Make a figure summarizing the unwarping
    report = MapNode(UnwarpReport(), ["orig_file", "corrected_file"],
                     "unwarp_report")

    # Define the outputs
    outputnode = Node(IdentityInterface(["timeseries", "report"]), "outputs")

    # Define and connect the workflow
    unwarp = Workflow(name)
    unwarp.connect([
        (inputnode, topup, [("fieldmap", "in_file")]),
        (inputnode, applytopup, [("timeseries", "in_files")]),
        (topup, applytopup, [("out_fieldcoef", "in_topup_fieldcoef"),
                             ("out_movpar", "in_topup_movpar"),
                             ("out_enc_file", "encoding_file")]),
        (inputnode, report, [("fieldmap", "orig_file")]),
        (topup, report, [("out_corrected", "corrected_file")]),
        (applytopup, outputnode, [("out_corrected", "timeseries")]),
        (report, outputnode, [("out_file", "report")]),
    ])

    return unwarp
Example #24
def cert(zfrqs):
    inputnode = Node(IdentityInterface(fields=['cert_180', 'cert_360', 'mask_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['cert_spectrum', 'cert_amide']),
                      name='outputnode')

    cert_sub = Node(ImageMaths(op_string='-sub', out_file='cert.nii.gz'),
                    name='cert_subtract')
    amide_index = (np.abs(zfrqs - 3.5)).argmin()
    amide = Node(ExtractROI(t_min=amide_index, t_size=1),
                 name='amide_extract')

    cert = Workflow(name='CERT')
    cert.connect([(inputnode, cert_sub, [('cert_360', 'in_file'), ('cert_180', 'in_file2')]),
                  (cert_sub, amide, [('out_file', 'in_file')]),
                  (cert_sub, outputnode, [('out_file', 'cert_spectrum')]),
                  (amide, outputnode, [('roi_file', 'cert_amide')])
                  ])

    return cert
Example #25
def cert(zfrqs):
    inputnode = Node(IdentityInterface(fields=['cert_180', 'cert_360', 'mask_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['cert_spectrum', 'cert_amide']),
                      name='outputnode')

    cert_sub = Node(ImageMaths(op_string='-sub', out_file='cert.nii.gz'),
                    name='cert_subtract')  # iterfield is only valid on MapNode
    amide_index = (np.abs(zfrqs - 3.5)).argmin()
    amide = Node(Select(volumes=[amide_index], out_file='amide.nii.gz'),
                 name='select_amide')

    cert = Workflow(name='CERT')
    cert.connect([(inputnode, cert_sub, [('cert_360', 'in_file'), ('cert_180', 'in_file2')]),
                  (cert_sub, amide, [('out_file', 'in_file')]),
                  (cert_sub, outputnode, [('out_file', 'cert_spectrum')]),
                  (amide, outputnode, [('out_file', 'cert_amide')])
                  ])

    return cert
Example #26
def create_confound_extraction_workflow(name="confounds", wm_components=6):
    """Extract nuisance variables from anatomical sources."""
    inputnode = Node(
        IdentityInterface(
            ["timeseries", "brain_mask", "reg_file", "subject_id"]), "inputs")

    # Grab the subject's Freesurfer aseg segmentation as the anatomical source
    getaseg = Node(
        io.SelectFiles({"aseg": "{subject_id}/mri/aseg.mgz"},
                       base_directory=os.environ["SUBJECTS_DIR"]), "getaseg")

    # Select and erode the white matter to get deep voxels
    selectwm = Node(fs.Binarize(erode=3, wm=True), "selectwm")

    # Transform the mask into functional space
    transform = MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                        ["reg_file", "source_file"], "transform")

    # Extract eigenvariates of the timeseries from WM and whole brain
    extract = MapNode(ExtractConfounds(n_components=wm_components),
                      ["timeseries", "brain_mask", "wm_mask"], "extract")

    outputnode = Node(IdentityInterface(["confound_file"]), "outputs")

    confounds = Workflow(name)
    confounds.connect([
        (inputnode, getaseg, [("subject_id", "subject_id")]),
        (getaseg, selectwm, [("aseg", "in_file")]),
        (selectwm, transform, [("binary_file", "target_file")]),
        (inputnode, transform, [("reg_file", "reg_file"),
                                ("timeseries", "source_file")]),
        (transform, extract, [("transformed_file", "wm_mask")]),
        (inputnode, extract, [("timeseries", "timeseries"),
                              ("brain_mask", "brain_mask")]),
        (extract, outputnode, [("out_file", "confound_file")]),
    ])

    return confounds
Example #27
def runNipypeBet(controller, subject_list, anatomical_id, proj_directory):

    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list)]

    #anat_file = opj('{subject_id}','{subject_id}_{anatomical_id}.nii')
    separator = ''
    concat_words = ('{subject_id}_', anatomical_id, '.nii.gz')
    anat_file_name = separator.join(concat_words)

    if controller.b_radiological_convention.get():
        anat_file = opj('{subject_id}', anat_file_name)
    else:
        anat_file = opj('{subject_id}', 'Intermediate_Files', 'Original_Files',
                        anat_file_name)

    templates = {'anat': anat_file}

    selectfiles = Node(SelectFiles(templates, base_directory=proj_directory),
                       name="selectfiles")

    skullstrip = Node(BET(robust=True,
                          frac=0.5,
                          vertical_gradient=0,
                          output_type='NIFTI_GZ'),
                      name="skullstrip")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=proj_directory), name="datasink")

    wf_sub = Workflow(name="wf_sub")
    wf_sub.base_dir = proj_directory
    wf_sub.connect(infosource, "subject_id", selectfiles, "subject_id")
    wf_sub.connect(selectfiles, "anat", skullstrip, "in_file")
    wf_sub.connect(skullstrip, "out_file", datasink, "bet.@out_file")

    substitutions = [('%s_brain' % (anatomical_id), 'brain')]
    # Feed the substitution strings to the DataSink node
    datasink.inputs.substitutions = substitutions
    # Run the workflow with the substitutions in place
    wf_sub.run(plugin='MultiProc')

    return 'brain'
Example #28
    def with_runs(self, runs: t.List[str]):
        self.runselector = Node(IdentityInterface(fields=['run']),
                                name="RunSelector")
        self.runselector.iterables = ('run', runs)
        self.report_creator.inputs.runs = runs
        fields = self.last_join.interface._fields
        self.pipeline_quality_measures_join_runs = create_flatten_identity_join_node(
            name="JoinPipelinesQualityMeasuresOverRuns",
            fields=fields,
            joinsource=self.runselector,
            flatten_fields=fields)
        self.connections += [
            (self.runselector, self.bidsgrabber, [('run', 'run')]),
            (self.runselector, self.pipelines_quality_measures,
             [('run', 'run')]),
            (self.last_join, self.pipeline_quality_measures_join_runs,
             list(zip(fields, fields)))
        ]
        self.last_join = self.pipeline_quality_measures_join_runs
Example #29
def create_corthick_wf():
    import nipype.pipeline.engine as pe  # pypeline engine
    import os
    from nipype import IdentityInterface
    from nipype.interfaces.ants.segmentation import KellyKapowski
    from nipype.interfaces.io import DataSink

    corthick_wf = pe.Workflow(name='corthick_wf')

    inputspec = pe.Node(IdentityInterface(
        fields=['seg_file', 'wmprob_file', 'out_dir'], mandatory_inputs=False),
                        name='inputspec')
    DiReCT = pe.Node(KellyKapowski(), name='DiReCt')
    sinker = pe.Node(DataSink(parameterization=True), name='sinker')

    corthick_wf.connect([
        (inputspec, DiReCT, [('seg_file', 'segmentation_image'),
                             ('wmprob_file', 'white_matter_prob_image')]),
        (inputspec, sinker, [('out_dir', 'base_directory')]),
        (DiReCT, sinker, [('cortical_thickness', 'out.@thick'),
                          ('warped_white_matter', 'out.@wm')]),
    ])
    return corthick_wf
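A usage sketch for create_corthick_wf, assuming ANTs is installed so KellyKapowski can run; the paths are hypothetical:

wf = create_corthick_wf()
wf.inputs.inputspec.seg_file = "segmentation.nii.gz"
wf.inputs.inputspec.wmprob_file = "wm_prob.nii.gz"
wf.inputs.inputspec.out_dir = "/tmp/corthick_out"
wf.run()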
Example #30
    def analysis_steps(self):
        self.analysis = type('', (), {})()  # anonymous namespace object to hold nodes
        # Get files
        subj_list = [
            subj.split('_')[:-1] for subj in next(os.walk(self.proj_dir))[1]
        ]
        # TODO limit the subj_list to those without sw processed files.

        # For parallelization by subject, use IdentityInterface
        self.analysis.infosource = Node(
            IdentityInterface(fields=['subj_id', 'task']), name="infosource")
        self.analysis.infosource.iterables = [('subj_id', subj_list),
                                              ('task', self.task_names)]

        templates = {
            'anat': '{subj_id}/t1/{subj_id}_t1*.nii',
            'func': '{subj_id}/{task}*/{subj_id}_{task}*.nii'
        }
        self.analysis.sf = Node(SelectFiles(templates), name='selectfiles')
        self.analysis.sf.inputs.base_directory = self.proj_dir

        # Realign
        self.analysis.realign = Node(spm.Realign(register_to_mean=True,
                                                 fwhm=self.opts.fwhm),
                                     name='realign')

        # Coregister
        self.analysis.coreg = Node(spm.Coregister(), name='coregistration')
        # Normalize
        self.analysis.norm12 = Node(spm.Normalize12(
            bias_regularization=1e-05, affine_regularization_type='mni'),
                                    name='normalize')

        # Smooth
        self.analysis.smooth = Node(spm.Smooth(), name='smooth')
        # smooth.inputs.in_files = 'functional.nii'
        self.analysis.smooth.inputs.fwhm = self.opts.smooth_fwhm