Example #1
def calc_fd(mat_file, dataset="fnl"):
    """
    Compute Framewise Displacement as in Power et al 2012 and save to file. Uses the default radius of 50mm.
    
    Args:
        mat_file (path object): .mat file outputted from FSL
        dataset (str): 'fnl' or 'sherlock'
        
    Returns:
        file_path
        
    """

    assert isinstance(mat_file, Path), "mat_file must be a Path object"
    out_file = mat_file.parent / "fd_power_2012.csv"

    fd = FramewiseDisplacement(
        in_file=mat_file, out_file=out_file, parameter_source="FSL", save_plot=False
    )
    if dataset == "fnl":
        fd.inputs.series_tr = 2.0
    elif dataset == "sherlock":
        fd.inputs.series_tr = 1.5

    _ = fd.run()
    return out_file
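
A minimal usage sketch for the helper above, assuming the imports the snippet relies on (pathlib.Path and nipype's FramewiseDisplacement) are in scope; the input path is hypothetical:

# Sketch only: the path below is hypothetical and calc_fd is the helper defined above.
from pathlib import Path

fd_csv = calc_fd(Path("/data/sub-01/func/mcflirt.par"), dataset="fnl")
print(fd_csv)  # <input dir>/fd_power_2012.csv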
Example #2
def test_fd():
    tempdir = mkdtemp()
    ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt'))
    fd = FramewiseDisplacement(in_plots=example_data('fsl_mcflirt_movpar.txt'),
                               out_file=tempdir + '/fd.txt')
    res = fd.run()
    yield assert_equal, np.allclose(ground_truth, np.loadtxt(res.outputs.out_file)), True
    yield assert_equal, np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-4, True
    rmtree(tempdir)
Example #3
def test_fd(tmpdir):
    tempdir = str(tmpdir)
    ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt'))
    fdisplacement = FramewiseDisplacement(in_plots=example_data('fsl_mcflirt_movpar.txt'),
                                          out_file=tempdir + '/fd.txt')
    res = fdisplacement.run()

    with open(res.outputs.out_file) as all_lines:
        for line in all_lines:
            assert 'FramewiseDisplacement' in line
            break

    assert np.allclose(ground_truth, np.loadtxt(res.outputs.out_file, skiprows=1), atol=.16)
    assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2
Example #4
def test_fd():
    tempdir = mkdtemp()
    ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt'))
    fdisplacement = FramewiseDisplacement(
        in_plots=example_data('fsl_mcflirt_movpar.txt'),
        out_file=tempdir + '/fd.txt')
    res = fdisplacement.run()

    yield assert_true, np.allclose(ground_truth,
                                   np.loadtxt(res.outputs.out_file),
                                   atol=.16)
    yield assert_true, np.abs(ground_truth.mean() -
                              res.outputs.fd_average) < 1e-2
    rmtree(tempdir)
Example #5
def test_fd():
    tempdir = mkdtemp()
    ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt'))
    fdisplacement = FramewiseDisplacement(in_plots=example_data('fsl_mcflirt_movpar.txt'),
                                          out_file=tempdir + '/fd.txt')
    res = fdisplacement.run()

    with open(res.outputs.out_file) as all_lines:
        for line in all_lines:
            yield assert_in, 'framewise_displacement', line
            break

    yield assert_true, np.allclose(ground_truth, np.loadtxt(res.outputs.out_file), atol=.16)
    yield assert_true, np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2

    rmtree(tempdir)
Example #6
def test_fd(tmpdir):
    tempdir = str(tmpdir)
    ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt'))
    fdisplacement = FramewiseDisplacement(
        in_plots=example_data('fsl_mcflirt_movpar.txt'),
        out_file=tempdir + '/fd.txt')
    res = fdisplacement.run()

    with open(res.outputs.out_file) as all_lines:
        for line in all_lines:
            assert 'FramewiseDisplacement' in line
            break

    assert np.allclose(ground_truth,
                       np.loadtxt(res.outputs.out_file, skiprows=1),
                       atol=.16)
    assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2
Example #7
def test_fd(tmpdir):
    tempdir = tmpdir.strpath
    ground_truth = np.loadtxt(example_data("fsl_motion_outliers_fd.txt"))
    fdisplacement = FramewiseDisplacement(
        in_file=example_data("fsl_mcflirt_movpar.txt"),
        out_file=tempdir + "/fd.txt",
        parameter_source="FSL",
    )
    res = fdisplacement.run()

    with open(res.outputs.out_file) as all_lines:
        for line in all_lines:
            assert "FramewiseDisplacement" in line
            break

    assert np.allclose(ground_truth,
                       np.loadtxt(res.outputs.out_file, skiprows=1),
                       atol=0.16)
    assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2
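
Example #7 uses the current interface signature (in_file plus parameter_source). For reference, a standalone sketch of the same call outside a test; the input file name is hypothetical:

# Minimal sketch of the current nipype FramewiseDisplacement usage;
# "rest_mcf.par" is a hypothetical MCFLIRT motion-parameter file.
from nipype.algorithms.confounds import FramewiseDisplacement

fd = FramewiseDisplacement(
    in_file="rest_mcf.par",        # 6-column motion parameters
    parameter_source="FSL",        # tells nipype how the columns are ordered/scaled
    series_tr=2.0,                 # repetition time in seconds (optional)
    out_file="fd_power_2012.txt",
)
res = fd.run()
print(res.outputs.out_file)    # per-volume FD series, written with a one-line header
print(res.outputs.fd_average)  # mean FD across the run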
Example #8
File: workflows.py  Project: MichlF/misc
def create_motion_confound_workflow(order=2,
                                    fd_cutoff=.2,
                                    name='motion_confound'):

    input_node = pe.Node(interface=IdentityInterface(
        fields=['par_file', 'output_directory', 'sub_id']),
                         name='inputspec')

    output_node = pe.Node(
        interface=IdentityInterface(fields=['out_fd', 'out_ext_moco']),
        name='outputspec')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    extend_motion_parameters = pe.MapNode(Extend_motion_parameters,
                                          iterfield=['par_file'],
                                          name='extend_motion_parameters')
    extend_motion_parameters.inputs.order = order

    framewise_disp = pe.MapNode(FramewiseDisplacement(parameter_source='FSL'),
                                iterfield=['in_file'],
                                name='framewise_disp')

    mcf_wf = pe.Workflow(name=name)
    mcf_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    mcf_wf.connect(input_node, 'sub_id', datasink, 'container')
    mcf_wf.connect(input_node, 'par_file', extend_motion_parameters,
                   'par_file')
    mcf_wf.connect(input_node, 'par_file', framewise_disp, 'in_file')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', output_node,
                   'out_ext_moco')
    mcf_wf.connect(framewise_disp, 'out_file', output_node, 'out_fd')
    mcf_wf.connect(extend_motion_parameters, 'out_ext', datasink, 'confounds')
    mcf_wf.connect(framewise_disp, 'out_file', datasink, 'confounds.@df')

    return mcf_wf
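
A hedged sketch of driving this workflow, assuming the project-level imports (pe, IdentityInterface, DataSink) and the custom Extend_motion_parameters interface are available; the paths and subject ID are hypothetical:

# Sketch only; paths and subject ID are hypothetical, and Extend_motion_parameters
# must be supplied by the surrounding project.
wf = create_motion_confound_workflow(order=2, fd_cutoff=0.2)
wf.inputs.inputspec.par_file = ["/data/sub-01/func/rest_mcf.par"]  # one .par per run (MapNode input)
wf.inputs.inputspec.output_directory = "/data/derivatives/confounds"
wf.inputs.inputspec.sub_id = "sub-01"
wf.run()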
Example #9
File: functional.py  Project: ylep/mriqc
def hmc(name='fMRI_HMC'):
    """
    Create a :abbr:`HMC (head motion correction)` workflow for fMRI.

    .. workflow::

        from mriqc.workflows.functional import hmc
        from mriqc.testing import mock_config
        with mock_config():
            wf = hmc()

    """
    from nipype.algorithms.confounds import FramewiseDisplacement
    from nipype.interfaces.afni import Calc, TShift, Refit, Despike, Volreg
    from niworkflows.interfaces.registration import EstimateReferenceImage

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_fd']),
                         name='outputnode')

    if any((config.workflow.start_idx is not None, config.workflow.stop_idx
            is not None)):
        drop_trs = pe.Node(Calc(expr='a', outputtype='NIFTI_GZ'),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'in_file_a'),
                                   ('start_idx', 'start_idx'),
                                   ('stop_idx', 'stop_idx')]),
        ])
    else:
        drop_trs = pe.Node(niu.IdentityInterface(fields=['out_file']),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'out_file')]),
        ])

    gen_ref = pe.Node(EstimateReferenceImage(mc_method="AFNI"), name="gen_ref")

    # calculate hmc parameters
    hmc = pe.Node(Volreg(args='-Fourier -twopass',
                         zpad=4,
                         outputtype='NIFTI_GZ'),
                  name='motion_correct',
                  mem_gb=mem_gb * 2.5)

    # Compute the frame-wise displacement
    fdnode = pe.Node(FramewiseDisplacement(normalize=False,
                                           parameter_source="AFNI"),
                     name='ComputeFD')

    workflow.connect([
        (inputnode, fdnode, [('fd_radius', 'radius')]),
        (gen_ref, hmc, [('ref_image', 'basefile')]),
        (hmc, outputnode, [('out_file', 'out_file')]),
        (hmc, fdnode, [('oned_file', 'in_file')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique

    st_corr = pe.Node(TShift(outputtype='NIFTI_GZ'), name='TimeShifts')

    deoblique_node = pe.Node(Refit(deoblique=True), name='deoblique')

    despike_node = pe.Node(Despike(outputtype='NIFTI_GZ'), name='despike')

    if all((config.workflow.correct_slice_timing, config.workflow.despike,
            config.workflow.deoblique)):

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])
    elif config.workflow.correct_slice_timing and config.workflow.despike:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, gen_ref, [('out_file', 'in_file')]),
            (despike_node, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.correct_slice_timing and config.workflow.deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.correct_slice_timing:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, gen_ref, [('out_file', 'in_file')]),
            (st_corr, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.despike and config.workflow.deoblique:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.despike:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, gen_ref, [('out_file', 'in_file')]),
            (despike_node, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.deoblique:

        workflow.connect([
            (drop_trs, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    else:
        workflow.connect([
            (drop_trs, gen_ref, [('out_file', 'in_file')]),
            (drop_trs, hmc, [('out_file', 'in_file')]),
        ])

    return workflow
Example #10
def hmc(name="fMRI_HMC"):
    """
    Create a :abbr:`HMC (head motion correction)` workflow for fMRI.

    .. workflow::

        from mriqc.workflows.functional import hmc
        from mriqc.testing import mock_config
        with mock_config():
            wf = hmc()

    """
    from nipype.algorithms.confounds import FramewiseDisplacement
    from nipype.interfaces.afni import Calc, Despike, Refit, TShift, Volreg

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["in_file", "fd_radius", "start_idx", "stop_idx"]),
        name="inputnode",
    )

    outputnode = pe.Node(niu.IdentityInterface(fields=["out_file", "out_fd"]),
                         name="outputnode")

    if any((
            config.workflow.start_idx is not None,
            config.workflow.stop_idx is not None,
    )):
        drop_trs = pe.Node(Calc(expr="a", outputtype="NIFTI_GZ"),
                           name="drop_trs")
        # fmt: off
        workflow.connect([
            (inputnode, drop_trs, [("in_file", "in_file_a"),
                                   ("start_idx", "start_idx"),
                                   ("stop_idx", "stop_idx")]),
        ])
        # fmt: on
    else:
        drop_trs = pe.Node(niu.IdentityInterface(fields=["out_file"]),
                           name="drop_trs")
        # fmt: off
        workflow.connect([
            (inputnode, drop_trs, [("in_file", "out_file")]),
        ])
        # fmt: on

    # calculate hmc parameters
    hmc = pe.Node(
        Volreg(args="-Fourier -twopass", zpad=4, outputtype="NIFTI_GZ"),
        name="motion_correct",
        mem_gb=mem_gb * 2.5,
    )

    # Compute the frame-wise displacement
    fdnode = pe.Node(
        FramewiseDisplacement(normalize=False, parameter_source="AFNI"),
        name="ComputeFD",
    )

    # fmt: off
    workflow.connect([
        (inputnode, fdnode, [("fd_radius", "radius")]),
        (hmc, outputnode, [("out_file", "out_file")]),
        (hmc, fdnode, [("oned_file", "in_file")]),
        (fdnode, outputnode, [("out_file", "out_fd")]),
    ])
    # fmt: on

    # Slice timing correction, despiking, and deoblique

    st_corr = pe.Node(TShift(outputtype="NIFTI_GZ"), name="TimeShifts")

    deoblique_node = pe.Node(Refit(deoblique=True), name="deoblique")

    despike_node = pe.Node(Despike(outputtype="NIFTI_GZ"), name="despike")

    if all((
            config.workflow.correct_slice_timing,
            config.workflow.despike,
            config.workflow.deoblique,
    )):

        # fmt: off
        workflow.connect([
            (drop_trs, st_corr, [("out_file", "in_file")]),
            (st_corr, despike_node, [("out_file", "in_file")]),
            (despike_node, deoblique_node, [("out_file", "in_file")]),
            (deoblique_node, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    elif config.workflow.correct_slice_timing and config.workflow.despike:
        # fmt: off
        workflow.connect([
            (drop_trs, st_corr, [("out_file", "in_file")]),
            (st_corr, despike_node, [("out_file", "in_file")]),
            (despike_node, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    elif config.workflow.correct_slice_timing and config.workflow.deoblique:
        # fmt: off
        workflow.connect([
            (drop_trs, st_corr, [("out_file", "in_file")]),
            (st_corr, deoblique_node, [("out_file", "in_file")]),
            (deoblique_node, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    elif config.workflow.correct_slice_timing:
        # fmt: off
        workflow.connect([
            (drop_trs, st_corr, [("out_file", "in_file")]),
            (st_corr, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    elif config.workflow.despike and config.workflow.deoblique:
        # fmt: off
        workflow.connect([
            (drop_trs, despike_node, [("out_file", "in_file")]),
            (despike_node, deoblique_node, [("out_file", "in_file")]),
            (deoblique_node, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    elif config.workflow.despike:
        # fmt: off
        workflow.connect([
            (drop_trs, despike_node, [("out_file", "in_file")]),
            (despike_node, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    elif config.workflow.deoblique:
        # fmt: off
        workflow.connect([
            (drop_trs, deoblique_node, [("out_file", "in_file")]),
            (deoblique_node, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    else:
        # fmt: off
        workflow.connect([
            (drop_trs, hmc, [("out_file", "in_file")]),
        ])
        # fmt: on
    return workflow
Example #11
def create_rs_qc(subjectlist):
    # main workflow for extended qc of diffusion/rsfmri data
    # fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # some hard coded things
    fd_thres = 0.2
    tr = 2

    # Specify the location of the preprocessed data
    data_dir = "/data/pt_life/LIFE_fu/wd_preprocessing/hcp_prep_workflow/resting/"
    working_dir = "/data/pt_life/LIFE_fu/wd_preprocessing/"  #MODIFY
    freesurfer_dir = "/data/pt_life_freesurfer/freesurfer_all"

    qc = Workflow(name="qc")
    qc.base_dir = working_dir + '/'
    qc.config['execution']['crashdump_dir'] = qc.base_dir + "/crash_files"
    qc.config['execution'] = {'hash_method': 'content'}
    #first get all data needed
    identitynode = Node(util.IdentityInterface(fields=['subject']),
                        name='identitynode')
    identitynode.iterables = ('subject', subjectlist)

    info = dict(func=[[
        'transform_timeseries/', '_subject_', 'subj', '/merge/rest2anat.nii.gz'
    ]],
                dvars=[[
                    'transform_timeseries/', '_subject_', 'subj',
                    '/dvars/rest2anat_dvars.tsv'
                ]],
                motpars=[[
                    '/motion_correction/', '_subject_', 'subj',
                    '/mcflirt/rest_realigned.nii.gz.par'
                ]],
                brainmask=[[
                    'transform_timeseries/', '_subject_', 'subj',
                    '/resample_brain/T1_brain_mask_lowres.nii.gz'
                ]])

    ds_rs = Node(interface=nio.DataGrabber(
        infields=['subj'], outfields=['func', 'dvars', 'motpars',
                                      'brainmask']),
                 name='ds_rs')
    ds_rs.inputs.base_directory = data_dir
    ds_rs.inputs.template = '%s%s%s%s'
    ds_rs.inputs.template_args = info
    ds_rs.inputs.sort_filelist = True

    def juggle_subj(input_id):
        import pandas as pd
        from datetime import datetime as dt
        import os
        import random, string

        sic_pseudo = pd.read_csv(
            "/data/gh_gr_agingandobesity_share/life_shared/Data/Preprocessed/derivatives/pseudo_mrt_20201214.csv"
        )
        tmp = sic_pseudo.loc[sic_pseudo.sic == input_id, 'pseudonym']
        pseudo = tmp.get_values()[0] + "_fu"
        return pseudo

    rename = Node(util.Function(input_names=['input_id'],
                                output_names=['output_id'],
                                function=juggle_subj),
                  name="rename")

    get_fs = Node(nio.FreeSurferSource(), name="get_fs")
    get_fs.inputs.subjects_dir = freesurfer_dir

    get_correct_aseg = Node(util.Function(input_names=['in_list'],
                                          output_names=['out_aseg'],
                                          function=get_aseg),
                            name="get_correct_aseg")

    convert = Node(fs.MRIConvert(), name="convert")
    convert.inputs.out_type = "niigz"

    downsample = Node(afni.Resample(resample_mode='NN',
                                    outputtype='NIFTI_GZ',
                                    out_file='aparcaseg_lowres.nii.gz'),
                      name='downsample')

    calc_fd_official = Node(FramewiseDisplacement(parameter_source='FSL'),
                            name='calc_fd_official')

    calc_fd = Node(util.Function(
        input_names=['realignment_parameters_file', 'parameter_source'],
        output_names=['FD_power', 'fn'],
        function=calc_frame_displacement),
                   name="calc_fd")
    calc_fd.inputs.parameter_source = 'FSL'

    outliers = Node(afni.OutlierCount(fraction=True, out_file='outliers.out'),
                    name='outliers',
                    mem_gb=1 * 2.5)

    bigplot = Node(util.Function(input_names=[
        'func', 'seg', 'tr', 'fd_thres', 'outliers', 'dvars', 'fd', 'subj',
        'outfile'
    ],
                                 output_names=['fn', 'dataframe'],
                                 function=make_the_plot),
                   name="bigplot")
    bigplot.inputs.tr = tr
    bigplot.inputs.fd_thres = fd_thres
    bigplot.inputs.outfile = "summary_fmriplot.png"

    fftplot = Node(util.Function(input_names=['fn_pd', 'tr'],
                                 output_names=['fn'],
                                 function=plot_fft),
                   name="fftplot")
    fftplot.inputs.tr = tr

    datasink = Node(name="datasink", interface=nio.DataSink())
    datasink.inputs.base_directory = "/data/pt_life_restingstate_followup/Results/QA"
    datasink.inputs.substitutions = [('_subject_', '')]

    qc.connect([
        (identitynode, rename, [('subject', 'input_id')]),
        (rename, get_fs, [('output_id', 'subject_id')]),
        (identitynode, ds_rs, [('subject', 'subj')]),
        (identitynode, bigplot, [('subject', 'subj')]),
        (get_fs, get_correct_aseg, [('aparc_aseg', 'in_list')]),
        (get_correct_aseg, convert, [('out_aseg', 'in_file')]),
        (convert, downsample, [('out_file', 'in_file')]),
        (ds_rs, downsample, [('func', 'master')]),
        (downsample, bigplot, [('out_file', 'seg')]),
        (ds_rs, calc_fd, [('motpars', 'realignment_parameters_file')]),
        (ds_rs, calc_fd_official, [('motpars', 'in_file')]),
        (ds_rs, bigplot, [('func', 'func')]),
        (ds_rs, bigplot, [('dvars', 'dvars')]),
        (calc_fd, bigplot, [('fn', 'fd')]),  #FD_power
        (ds_rs, outliers, [('func', 'in_file')]),
        (ds_rs, outliers, [('brainmask', 'mask')]),
        (outliers, bigplot, [('out_file', 'outliers')]),
        (bigplot, datasink, [('fn', 'detailedQA.@bigplot')]),
        (bigplot, fftplot, [('dataframe', 'fn_pd')]),
        (bigplot, datasink, [('dataframe', 'detailedQA.metrics.@dataframe')]),
        (fftplot, datasink, [('fn', 'detailedQA.@fftplot')]),
        (calc_fd, datasink, [('fn', 'detailedQA.metrics.@fd')]),
        (calc_fd_official, datasink, [('out_file',
                                       'detailedQA.metrics.@fd_official')])
    ])

    qc.run(plugin="MultiProc", plugin_args={"n_procs": 16, "non_daemon": True})

    return qc
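
A usage sketch, assuming the hard-coded directories inside create_rs_qc exist and the helper functions it references (get_aseg, calc_frame_displacement, make_the_plot, plot_fft) are importable; the subject IDs are hypothetical:

# Sketch only; subject IDs are hypothetical and the hard-coded paths must exist
# on the executing machine.
subjects = ["LI00000001", "LI00000002"]
qc_wf = create_rs_qc(subjects)  # builds and runs the QC workflow (MultiProc, 16 processes)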