Example #1
def test_jsonsink():
    import simplejson
    import os
    import shutil
    from tempfile import mkdtemp

    ds = nio.JSONFileSink()
    yield assert_equal, ds.inputs._outputs, {}
    ds = nio.JSONFileSink(in_dict={'foo': 'var'})
    yield assert_equal, ds.inputs.in_dict, {'foo': 'var'}
    ds = nio.JSONFileSink(infields=['test'])
    yield assert_true, 'test' in ds.inputs.copyable_trait_names()

    curdir = os.getcwd()
    outdir = mkdtemp()
    os.chdir(outdir)
    js = nio.JSONFileSink(infields=['test'], in_dict={'foo': 'var'})
    js.inputs.new_entry = 'someValue'
    setattr(js.inputs, 'contrasts.alt', 'someNestedValue')
    res = js.run()

    with open(res.outputs.out_file, 'r') as f:
        data = simplejson.load(f)
    yield assert_true, data == {"contrasts": {"alt": "someNestedValue"}, "foo": "var", "new_entry": "someValue"}

    js = nio.JSONFileSink(infields=['test'], in_dict={'foo': 'var'})
    js.inputs.new_entry = 'someValue'
    js.inputs.test = 'testInfields'
    setattr(js.inputs, 'contrasts.alt', 'someNestedValue')
    res = js.run()

    with open(res.outputs.out_file, 'r') as f:
        data = simplejson.load(f)
    yield assert_true, data == {"test": "testInfields", "contrasts": {"alt": "someNestedValue"}, "foo": "var", "new_entry": "someValue"}

    os.chdir(curdir)
    shutil.rmtree(outdir)
Example #2
def test_jsonsink_input():

    ds = nio.JSONFileSink()
    assert ds.inputs._outputs == {}

    ds = nio.JSONFileSink(in_dict={'foo': 'var'})
    assert ds.inputs.in_dict == {'foo': 'var'}

    ds = nio.JSONFileSink(infields=['test'])
    assert 'test' in ds.inputs.copyable_trait_names()
Example #3
def test_jsonsink_input():

    ds = nio.JSONFileSink()
    assert ds.inputs._outputs == {}

    ds = nio.JSONFileSink(in_dict={"foo": "var"})
    assert ds.inputs.in_dict == {"foo": "var"}

    ds = nio.JSONFileSink(infields=["test"])
    assert "test" in ds.inputs.copyable_trait_names()
Example #4
def test_jsonsink(tmpdir, inputs_attributes):
    tmpdir.chdir()
    js = nio.JSONFileSink(infields=['test'], in_dict={'foo': 'var'})
    setattr(js.inputs, 'contrasts.alt', 'someNestedValue')
    expected_data = {"contrasts": {"alt": "someNestedValue"}, "foo": "var"}
    for key, val in inputs_attributes.items():
        setattr(js.inputs, key, val)
        expected_data[key] = val

    res = js.run()
    with open(res.outputs.out_file, 'r') as f:
        data = simplejson.load(f)

    assert data == expected_data
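
Example #4 relies on a pytest fixture named inputs_attributes that is not shown in the excerpt. A minimal sketch of one plausible, parametrized definition follows (the parameter values are illustrative; the real test suite may use different ones):

import pytest

# Hypothetical fixture: each param is a dict of extra attributes that the test
# sets on the JSONFileSink inputs and then expects to find in the written JSON.
@pytest.fixture(params=[
    {},
    {'new_entry': 'someValue'},
    {'new_entry': 'someValue', 'test': 'testInfields'},
])
def inputs_attributes(request):
    return request.param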
Example #5
#Wraps command **fugue**
NodeHash_60c0018a5a60 = pe.MapNode(interface = fsl.FUGUE(), name = 'NodeName_60c0018a5a60', iterfield = ['in_file', 'fmap_in_file', 'mask_file'])

#Generic datasink module to store structured outputs
NodeHash_6000010a5b80 = pe.Node(interface = io.DataSink(), name = 'NodeName_6000010a5b80')
NodeHash_6000010a5b80.inputs.base_directory = SinkDir
NodeHash_6000010a5b80.inputs.regexp_substitutions = [("func_fieldmapcorr/_NodeName_.{13}", "")]

#Generic datasink module to store structured outputs
NodeHash_608001eb9bc0 = pe.Node(interface = io.DataSink(), name = 'NodeName_608001eb9bc0')
NodeHash_608001eb9bc0.inputs.base_directory = SinkDir
NodeHash_608001eb9bc0.inputs.regexp_substitutions = [("_NodeName_.{13}", "")]

#Very simple frontend for storing values into a JSON file.
NodeHash_6000024a5820 = pe.Node(interface = io.JSONFileSink(), name = 'NodeName_6000024a5820')
NodeHash_6000024a5820.inputs.out_file = OutJSON

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_608001eb9bc0, 'out_file', NodeHash_6000024a5820, 'fieldmap')
analysisflow.connect(NodeHash_6000018b2600, 'out_fieldmap', NodeHash_608001eb9bc0, 'fieldmap')
analysisflow.connect(NodeHash_6000010a5b80, 'out_file', NodeHash_6000024a5820, 'func_fieldmapcorr')
analysisflow.connect(NodeHash_600001eab220, 'abs', NodeHash_6000018b2600, 'delta_TE')
analysisflow.connect(NodeHash_60c0018a4860, 'dif', NodeHash_600001eab220, 'x')
analysisflow.connect(NodeHash_604000eb5d20, 'unwarp_direction', NodeHash_60c0018a5a60, 'unwarp_direction')
analysisflow.connect(NodeHash_60c0018a5a60, 'unwarped_file', NodeHash_6000010a5b80, 'func_fieldmapcorr')
analysisflow.connect(NodeHash_6000018b2600, 'out_fieldmap', NodeHash_60c0018a5a60, 'fmap_in_file')
analysisflow.connect(NodeHash_60c0018a6e40, 'out_file', NodeHash_60c0018a5a60, 'mask_file')
analysisflow.connect(NodeHash_604000cba700, 'mask_file', NodeHash_60c0018a6e40, 'in_file')
analysisflow.connect(NodeHash_604000eb5d20, 'dwell_time', NodeHash_60c0018a5a60, 'dwell_time')
Example #6
NodeHash_6040004ad140.inputs.outputtype = "NIFTI_GZ"
NodeHash_6040004ad140.inputs.terminal_output = 'allatonce'

#Generic datasink module to store structured outputs
NodeHash_6080008b3d40 = pe.Node(interface = io.DataSink(), name = 'NodeName_6080008b3d40')
NodeHash_6080008b3d40.inputs.base_directory = SinkDir
NodeHash_6080008b3d40.inputs.regexp_substitutions = [("func_slicetimed/_NodeName_.{13}", "")]

#Basic interface class generates identity mappings
NodeHash_6080008b5660 = pe.Node(utility.IdentityInterface(fields=['func_slicetimed','TR']), name = 'NodeName_6080008b5660')

#Custom interface wrapping function JoinVal2Dict
NodeHash_6040004afde0 = pe.Node(interface = utils_convert.JoinVal2Dict, name = 'NodeName_6040004afde0')

#Very simple frontend for storing values into a JSON file.
NodeHash_6080008b5240 = pe.Node(interface = io.JSONFileSink(), name = 'NodeName_6080008b5240')
NodeHash_6080008b5240.inputs.out_file = OutJSON

#Very simple frontend for storing values into a JSON file.
NodeHash_6080008b7400 = pe.Node(interface = io.JSONFileSink(), name = 'NodeName_6080008b7400')
NodeHash_6080008b7400.inputs.out_file = SinkDir + "/TR.json"

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_6040006ae640, 'slicetiming_txt', NodeHash_6040004ad140, 'tpattern')
analysisflow.connect(NodeHash_6040006ae9a0, 'float', NodeHash_6080008b5660, 'TR')
analysisflow.connect(NodeHash_6040006ae640, 'func', NodeHash_6040004ad140, 'in_file')
analysisflow.connect(NodeHash_6040006ae640, 'func', NodeHash_6000004b9860, 'in_file')
analysisflow.connect(NodeHash_6040004aee80, 'str', NodeHash_6040004ad140, 'tr')
analysisflow.connect(NodeHash_6000004b9860, 'TR', NodeHash_6040004aee80, 'float')
analysisflow.connect(NodeHash_6080008b3d40, 'out_file', NodeHash_6040004afde0, 'keys')
Example #7
def fmri_qc_workflow(name='fMRIQC', settings=None):
    """ The fMRI qc workflow """

    if settings is None:
        settings = {}

    workflow = pe.Workflow(name=name)
    deriv_dir = op.abspath(op.join(settings['output_dir'], 'derivatives'))

    if not op.exists(deriv_dir):
        os.makedirs(deriv_dir)

    # Read FD radius, or default it
    fd_radius = settings.get('fd_radius', 50.)

    # Define workflow, inputs and outputs
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bids_dir', 'subject_id', 'session_id', 'run_id', 'site_name',
        'start_idx', 'stop_idx'
    ]),
                        name='inputnode')
    get_idx = pe.Node(niu.Function(
        input_names=['in_file', 'start_idx', 'stop_idx'],
        function=fmri_getidx,
        output_names=['start_idx', 'stop_idx']),
                      name='get_idx')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'qc', 'mosaic', 'out_group', 'out_movpar', 'out_dvars', 'out_fd'
    ]),
                         name='outputnode')

    # 0. Get data
    datasource = pe.Node(niu.Function(input_names=[
        'bids_dir', 'data_type', 'subject_id', 'session_id', 'run_id'
    ],
                                      output_names=['out_file'],
                                      function=bids_getfile),
                         name='datasource')
    datasource.inputs.data_type = 'func'

    # Workflow --------------------------------------------------------
    # 1. HMC: head motion correct
    hmcwf = hmc_mcflirt()
    if settings.get('hmc_afni', False):
        hmcwf = hmc_afni(
            st_correct=settings.get('correct_slice_timing', False))
    hmcwf.inputs.inputnode.fd_radius = fd_radius

    mean = pe.Node(
        afp.TStat(  # 2. Compute mean fmri
            options='-mean', outputtype='NIFTI_GZ'),
        name='mean')
    bmw = fmri_bmsk_workflow(  # 3. Compute brain mask
        use_bet=settings.get('use_bet', False))

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(nac.TSNR(), name='compute_tsnr')

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(remove_zerovariance=True,
                                      save_plot=True,
                                      save_all=True,
                                      figdpi=200,
                                      figformat='pdf'),
                     name='ComputeDVARS')
    fdnode = pe.Node(nac.FramewiseDisplacement(normalize=True,
                                               save_plot=True,
                                               radius=fd_radius,
                                               figdpi=200),
                     name='ComputeFD')

    # AFNI quality measures
    fwhm = pe.Node(afp.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afp.OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers')
    quality = pe.Node(afp.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality')

    measures = pe.Node(FunctionalQC(), name='measures')

    # Link images that should be reported
    dsreport = pe.Node(nio.DataSink(base_directory=settings['report_dir'],
                                    parameterization=True),
                       name='dsreport')
    dsreport.inputs.container = 'func'
    dsreport.inputs.substitutions = [
        ('_data', ''), ('fd_power_2012', 'plot_fd'),
        ('tsnr.nii.gz', 'mosaic_TSNR.nii.gz'),
        ('mean.nii.gz', 'mosaic_TSNR_mean.nii.gz'),
        ('stdev.nii.gz', 'mosaic_TSNR_stdev.nii.gz')
    ]
    dsreport.inputs.regexp_substitutions = [
        ('_u?(sub-[\\w\\d]*)\\.([\\w\\d_]*)(?:\\.([\\w\\d_-]*))+',
         '\\1_ses-\\2_\\3'), ('sub-[^/.]*_dvars_std', 'plot_dvars'),
        ('sub-[^/.]*_mask', 'mask'),
        ('sub-[^/.]*_mcf_tstat', 'mosaic_epi_mean')
    ]

    workflow.connect([
        (inputnode, datasource, [('bids_dir', 'bids_dir'),
                                 ('subject_id', 'subject_id'),
                                 ('session_id', 'session_id'),
                                 ('run_id', 'run_id')]),
        (inputnode, get_idx, [('start_idx', 'start_idx'),
                              ('stop_idx', 'stop_idx')]),
        (datasource, get_idx, [('out_file', 'in_file')]),
        (datasource, hmcwf, [('out_file', 'inputnode.in_file')]),
        (get_idx, hmcwf, [('start_idx', 'inputnode.start_idx'),
                          ('stop_idx', 'inputnode.stop_idx')]),
        (hmcwf, bmw, [('outputnode.out_file', 'inputnode.in_file')]),
        (hmcwf, mean, [('outputnode.out_file', 'in_file')]),
        (hmcwf, tsnr, [('outputnode.out_file', 'in_file')]),
        (hmcwf, fdnode, [('outputnode.out_movpar', 'in_plots')]),
        (mean, fwhm, [('out_file', 'in_file')]),
        (bmw, fwhm, [('outputnode.out_file', 'mask')]),
        (hmcwf, outliers, [('outputnode.out_file', 'in_file')]),
        (bmw, outliers, [('outputnode.out_file', 'mask')]),
        (hmcwf, quality, [('outputnode.out_file', 'in_file')]),
        (hmcwf, dvnode, [('outputnode.out_file', 'in_file')]),
        (bmw, dvnode, [('outputnode.out_file', 'in_mask')]),
        (mean, measures, [('out_file', 'in_epi')]),
        (hmcwf, measures, [('outputnode.out_file', 'in_hmc')]),
        (bmw, measures, [('outputnode.out_file', 'in_mask')]),
        (tsnr, measures, [('tsnr_file', 'in_tsnr')]),
        (dvnode, measures, [('out_all', 'in_dvars')]),
        (fdnode, measures, [('out_file', 'in_fd')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
        (dvnode, outputnode, [('out_all', 'out_dvars')]),
        (hmcwf, outputnode, [('outputnode.out_movpar', 'out_movpar')]),
        (mean, dsreport, [('out_file', '@meanepi')]),
        (tsnr, dsreport, [('tsnr_file', '@tsnr'), ('stddev_file', '@tsnr_std'),
                          ('mean_file', '@tsnr_mean')]),
        (bmw, dsreport, [('outputnode.out_file', '@mask')]),
        (fdnode, dsreport, [('out_figure', '@fdplot')]),
        (dvnode, dsreport, [('fig_std', '@dvars')]),
    ])

    # Format name
    out_name = pe.Node(niu.Function(
        input_names=['subid', 'sesid', 'runid', 'prefix', 'out_path'],
        output_names=['out_file'],
        function=bids_path),
                       name='FormatName')
    out_name.inputs.out_path = deriv_dir
    out_name.inputs.prefix = 'func'

    # Save to JSON file
    datasink = pe.Node(nio.JSONFileSink(), name='datasink')
    datasink.inputs.qc_type = 'func'

    workflow.connect([
        (inputnode, out_name, [('subject_id', 'subid'),
                               ('session_id', 'sesid'), ('run_id', 'runid')]),
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('run_id', 'run_id')]),
        (fwhm, datasink, [(('fwhm', fwhm_dict), 'fwhm')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'outlier')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'quality')]),
        (measures, datasink, [('summary', 'summary'), ('spacing', 'spacing'),
                              ('size', 'size'), ('fber', 'fber'),
                              ('efc', 'efc'), ('snr', 'snr'), ('gsr', 'gsr'),
                              ('m_tsnr', 'm_tsnr'), ('fd', 'fd'),
                              ('dvars', 'dvars'), ('gcor', 'gcor')]),
        (out_name, datasink, [('out_file', 'out_file')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    return workflow
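
A brief usage sketch for the workflow above, assuming the mriqc module defining fmri_qc_workflow is importable; the settings keys ('output_dir', 'report_dir') are the ones the function reads, while the paths and BIDS identifiers are placeholders:

settings = {'output_dir': '/tmp/mriqc_out',       # placeholder path
            'report_dir': '/tmp/mriqc_reports'}   # placeholder path
wf = fmri_qc_workflow(name='fMRIQC', settings=settings)
wf.base_dir = '/tmp/mriqc_work'                   # placeholder working directory
wf.inputs.inputnode.bids_dir = '/data/bids'       # placeholder BIDS root
wf.inputs.inputnode.subject_id = 'sub-01'
wf.inputs.inputnode.session_id = 'ses-01'
wf.inputs.inputnode.run_id = 'run-01'
# wf.run()  # requires AFNI/FSL and the remaining mriqc dependencies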
Example #8
def fmri_qc_workflow(name='fMRIQC', settings=None):
    """ The fMRI qc workflow """

    if settings is None:
        settings = {}

    workflow = pe.Workflow(name=name)
    deriv_dir = op.abspath('./derivatives')
    if 'work_dir' in settings.keys():
        deriv_dir = op.abspath(op.join(settings['work_dir'], 'derivatives'))

    if not op.exists(deriv_dir):
        os.makedirs(deriv_dir)

    # Read FD radius, or default it
    fd_radius = settings.get('fd_radius', 80.)

    # Define workflow, inputs and outputs
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bids_root', 'subject_id', 'session_id', 'run_id', 'site_name',
        'start_idx', 'stop_idx'
    ]),
                        name='inputnode')
    get_idx = pe.Node(niu.Function(
        input_names=['in_file', 'start_idx', 'stop_idx'],
        function=fmri_getidx,
        output_names=['start_idx', 'stop_idx']),
                      name='get_idx')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['qc', 'mosaic', 'out_group', 'out_movpar', 'out_dvars']),
                         name='outputnode')

    # 0. Get data
    datasource = pe.Node(niu.Function(input_names=[
        'bids_root', 'data_type', 'subject_id', 'session_id', 'run_id'
    ],
                                      output_names=['out_file'],
                                      function=bids_getfile),
                         name='datasource')
    datasource.inputs.data_type = 'func'

    # Workflow --------------------------------------------------------
    # 1. HMC: head motion correct
    hmcwf = hmc_mcflirt()
    if settings.get('hmc_afni', False):
        hmcwf = hmc_afni(
            st_correct=settings.get('correct_slice_timing', False))
    hmcwf.inputs.inputnode.fd_radius = fd_radius

    mean = pe.Node(
        afp.TStat(  # 2. Compute mean fmri
            options='-mean', outputtype='NIFTI_GZ'),
        name='mean')
    bmw = fmri_bmsk_workflow(  # 3. Compute brain mask
        use_bet=settings.get('use_bet', False))

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(nam.TSNR(), name='compute_tsnr')

    # Compute DVARS
    dvnode = pe.Node(ComputeDVARS(), name='ComputeDVARS')

    # AFNI quality measures
    fwhm = pe.Node(afp.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afp.OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers')
    quality = pe.Node(afp.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality')

    measures = pe.Node(FunctionalQC(), name='measures')

    # Plots
    plot_mean = pe.Node(PlotMosaic(title='Mean fMRI'), name='plot_mean')
    plot_tsnr = pe.Node(PlotMosaic(title='tSNR volume'), name='plot_tSNR')
    plot_fd = pe.Node(PlotFD(), name='plot_fd')
    plot_fd.inputs.fd_radius = fd_radius

    merg = pe.Node(niu.Merge(3), name='plot_metadata')

    workflow.connect([
        (inputnode, datasource, [('bids_root', 'bids_root'),
                                 ('subject_id', 'subject_id'),
                                 ('session_id', 'session_id'),
                                 ('run_id', 'run_id')]),
        (inputnode, get_idx, [('start_idx', 'start_idx'),
                              ('stop_idx', 'stop_idx')]),
        (datasource, get_idx, [('out_file', 'in_file')]),
        (inputnode, merg, [('session_id', 'in1'), ('run_id', 'in2'),
                           ('site_name', 'in3')]),
        (datasource, hmcwf, [('out_file', 'inputnode.in_file')]),
        (get_idx, hmcwf, [('start_idx', 'inputnode.start_idx'),
                          ('stop_idx', 'inputnode.stop_idx')]),
        (hmcwf, bmw, [('outputnode.out_file', 'inputnode.in_file')]),
        (hmcwf, mean, [('outputnode.out_file', 'in_file')]),
        (hmcwf, tsnr, [('outputnode.out_file', 'in_file')]),
        (mean, plot_mean, [('out_file', 'in_file')]),
        (tsnr, plot_tsnr, [('tsnr_file', 'in_file')]),
        (hmcwf, plot_fd, [('outputnode.out_movpar', 'in_file')]),
        (inputnode, plot_mean, [('subject_id', 'subject')]),
        (inputnode, plot_tsnr, [('subject_id', 'subject')]),
        (inputnode, plot_fd, [('subject_id', 'subject')]),
        (merg, plot_mean, [('out', 'metadata')]),
        (merg, plot_tsnr, [('out', 'metadata')]),
        (merg, plot_fd, [('out', 'metadata')]),
        (mean, fwhm, [('out_file', 'in_file')]),
        (bmw, fwhm, [('outputnode.out_file', 'mask')]),
        (hmcwf, outliers, [('outputnode.out_file', 'in_file')]),
        (bmw, outliers, [('outputnode.out_file', 'mask')]),
        (hmcwf, quality, [('outputnode.out_file', 'in_file')]),
        (hmcwf, dvnode, [('outputnode.out_file', 'in_file')]),
        (bmw, dvnode, [('outputnode.out_file', 'in_mask')]),
        (mean, measures, [('out_file', 'in_epi')]),
        (hmcwf, measures, [('outputnode.out_file', 'in_hmc'),
                           ('outputnode.out_movpar', 'fd_movpar')]),
        (bmw, measures, [('outputnode.out_file', 'in_mask')]),
        (tsnr, measures, [('tsnr_file', 'in_tsnr')]),
        (dvnode, measures, [('out_file', 'in_dvars')]),
        (dvnode, outputnode, [('out_file', 'out_dvars')]),
        (hmcwf, outputnode, [('outputnode.out_movpar', 'out_movpar')]),
    ])

    if settings.get('mosaic_mask', False):
        workflow.connect(bmw, 'outputnode.out_file', plot_mean, 'in_mask')
        workflow.connect(bmw, 'outputnode.out_file', plot_tsnr, 'in_mask')

    # Save mean mosaic to well-formed path
    mvmean = pe.Node(niu.Rename(
        format_string='meanepi_%(subject_id)s_%(session_id)s_%(run_id)s',
        keep_ext=True),
                     name='rename_mean_mosaic')
    dsmean = pe.Node(nio.DataSink(base_directory=settings['work_dir'],
                                  parameterization=False),
                     name='ds_mean')
    workflow.connect([(inputnode, mvmean, [('subject_id', 'subject_id'),
                                           ('session_id', 'session_id'),
                                           ('run_id', 'run_id')]),
                      (plot_mean, mvmean, [('out_file', 'in_file')]),
                      (mvmean, dsmean, [('out_file', '@mosaic')])])
    # Save tSNR mosaic to well-formed path
    mvtsnr = pe.Node(niu.Rename(
        format_string='tsnr_%(subject_id)s_%(session_id)s_%(run_id)s',
        keep_ext=True),
                     name='rename_tsnr_mosaic')
    dstsnr = pe.Node(nio.DataSink(base_directory=settings['work_dir'],
                                  parameterization=False),
                     name='ds_tsnr')
    workflow.connect([(inputnode, mvtsnr, [('subject_id', 'subject_id'),
                                           ('session_id', 'session_id'),
                                           ('run_id', 'run_id')]),
                      (plot_tsnr, mvtsnr, [('out_file', 'in_file')]),
                      (mvtsnr, dstsnr, [('out_file', '@mosaic')])])
    # Save FD plot to well-formed path
    mvfd = pe.Node(niu.Rename(
        format_string='fd_%(subject_id)s_%(session_id)s_%(run_id)s',
        keep_ext=True),
                   name='rename_fd_mosaic')
    dsfd = pe.Node(nio.DataSink(base_directory=settings['work_dir'],
                                parameterization=False),
                   name='ds_fd')
    workflow.connect([(inputnode, mvfd, [('subject_id', 'subject_id'),
                                         ('session_id', 'session_id'),
                                         ('run_id', 'run_id')]),
                      (plot_fd, mvfd, [('out_file', 'in_file')]),
                      (mvfd, dsfd, [('out_file', '@mosaic')])])

    # Format name
    out_name = pe.Node(niu.Function(
        input_names=['subid', 'sesid', 'runid', 'prefix', 'out_path'],
        output_names=['out_file'],
        function=bids_path),
                       name='FormatName')
    out_name.inputs.out_path = deriv_dir
    out_name.inputs.prefix = 'func'

    # Save to JSON file
    datasink = pe.Node(nio.JSONFileSink(), name='datasink')
    datasink.inputs.qc_type = 'func'

    workflow.connect([
        (inputnode, out_name, [('subject_id', 'subid'),
                               ('session_id', 'sesid'), ('run_id', 'runid')]),
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('run_id', 'run_id')]),
        (plot_mean, datasink, [('out_file', 'mean_plot')]),
        (plot_tsnr, datasink, [('out_file', 'tsnr_plot')]),
        (plot_fd, datasink, [('out_file', 'fd_plot')]),
        (fwhm, datasink, [(('fwhm', fwhm_dict), 'fwhm')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'outlier')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'quality')]),
        (measures, datasink, [('summary', 'summary'), ('spacing', 'spacing'),
                              ('size', 'size'), ('fber', 'fber'),
                              ('efc', 'efc'), ('snr', 'snr'), ('gsr', 'gsr'),
                              ('m_tsnr', 'm_tsnr'), ('fd_stats', 'fd_stats'),
                              ('dvars', 'dvars'), ('gcor', 'gcor')]),
        (out_name, datasink, [('out_file', 'out_file')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    return workflow
Example #9
def anat_qc_workflow(name='MRIQC_Anat', settings=None):
    """
    One-subject-one-session-one-run pipeline to extract the NR-IQMs from
    anatomical images
    """
    if settings is None:
        settings = {}

    workflow = pe.Workflow(name=name)
    deriv_dir = op.abspath(op.join(settings['output_dir'], 'derivatives'))

    if not op.exists(deriv_dir):
        os.makedirs(deriv_dir)
    # Define workflow, inputs and outputs
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bids_dir', 'subject_id', 'session_id', 'run_id']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_json']),
                         name='outputnode')

    # 0. Get data
    datasource = pe.Node(niu.Function(input_names=[
        'bids_dir', 'data_type', 'subject_id', 'session_id', 'run_id'
    ],
                                      output_names=['anatomical_scan'],
                                      function=bids_getfile),
                         name='datasource')
    datasource.inputs.data_type = 'anat'

    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    # 1a. Reorient anatomical image
    arw = pe.Node(MRIConvert(out_type='niigz', out_orientation='LAS'),
                  name='Reorient')
    # 1b. Estimate bias
    n4itk = pe.Node(ants.N4BiasFieldCorrection(dimension=3, save_bias=True),
                    name='Bias')
    # 2. Skull-stripping (afni)
    asw = skullstrip_wf()
    mask = pe.Node(fsl.ApplyMask(), name='MaskAnatomical')
    # 3. Head mask (including nasion-to-cerebellum mask)
    hmsk = headmsk_wf()
    # 4. Air mask (with and without artifacts)
    amw = airmsk_wf(settings=settings)

    # Brain tissue segmentation
    segment = pe.Node(fsl.FAST(img_type=1,
                               segments=True,
                               out_basename='segment'),
                      name='segmentation')

    # AFNI check smoothing
    fwhm = pe.Node(afp.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(testing=settings.get('testing', False)),
                       'measures')

    # Link images that should be reported
    dsreport = pe.Node(nio.DataSink(base_directory=settings['report_dir'],
                                    parameterization=True),
                       name='dsreport')
    dsreport.inputs.container = 'anat'
    dsreport.inputs.substitutions = [('_data', ''),
                                     ('background_fit', 'plot_bgfit')]
    dsreport.inputs.regexp_substitutions = [
        ('_u?(sub-[\\w\\d]*)\\.([\\w\\d_]*)(?:\\.([\\w\\d_-]*))+',
         '\\1_ses-\\2_\\3'),
        ('anatomical_bgplotsub-[^/.]*_dvars_std', 'plot_dvars'),
        ('sub-[^/.]*_T1w_out_calc_thresh', 'mask'),
        ('sub-[^/.]*_T1w_out\\.', 'mosaic_t1w.')
    ]

    # Connect all nodes
    workflow.connect([
        (inputnode, datasource, [('bids_dir', 'bids_dir'),
                                 ('subject_id', 'subject_id'),
                                 ('session_id', 'session_id'),
                                 ('run_id', 'run_id')]),
        (datasource, arw, [('anatomical_scan', 'in_file')]),
        (datasource, meta, [('anatomical_scan', 'in_file')]),
        (arw, asw, [('out_file', 'inputnode.in_file')]),
        (arw, n4itk, [('out_file', 'input_image')]),
        # (asw, n4itk, [('outputnode.out_mask', 'mask_image')]),
        (n4itk, mask, [('output_image', 'in_file')]),
        (asw, mask, [('outputnode.out_mask', 'mask_file')]),
        (mask, segment, [('out_file', 'in_files')]),
        (n4itk, hmsk, [('output_image', 'inputnode.in_file')]),
        (segment, hmsk, [('tissue_class_map', 'inputnode.in_segm')]),
        (n4itk, measures, [('output_image', 'in_noinu')]),
        (arw, measures, [('out_file', 'in_file')]),
        (arw, fwhm, [('out_file', 'in_file')]),
        (asw, fwhm, [('outputnode.out_mask', 'mask')]),
        (arw, amw, [('out_file', 'inputnode.in_file')]),
        (n4itk, amw, [('output_image', 'inputnode.in_noinu')]),
        (asw, amw, [('outputnode.out_mask', 'inputnode.in_mask')]),
        (hmsk, amw, [('outputnode.out_file', 'inputnode.head_mask')]),
        (amw, measures, [('outputnode.out_file', 'air_msk')]),
        (amw, measures, [('outputnode.artifact_msk', 'artifact_msk')]),
        (segment, measures, [('tissue_class_map', 'in_segm'),
                             ('partial_volume_files', 'in_pvms')]),
        (n4itk, measures, [('bias_image', 'in_bias')]),
        (measures, dsreport, [('out_noisefit', '@anat_noiseplot')]),
        (arw, dsreport, [('out_file', '@anat_t1w')]),
        (asw, dsreport, [('outputnode.out_mask', '@anat_t1_mask')])
    ])

    # Format name
    out_name = pe.Node(niu.Function(
        input_names=['subid', 'sesid', 'runid', 'prefix', 'out_path'],
        output_names=['out_file'],
        function=bids_path),
                       name='FormatName')
    out_name.inputs.out_path = deriv_dir
    out_name.inputs.prefix = 'anat'

    # Save to JSON file
    jfs_if = nio.JSONFileSink()
    setattr(jfs_if, '_always_run', settings.get('force_run', False))
    datasink = pe.Node(jfs_if, name='datasink')
    datasink.inputs.qc_type = 'anat'

    workflow.connect([(inputnode, out_name, [('subject_id', 'subid'),
                                             ('session_id', 'sesid'),
                                             ('run_id', 'runid')]),
                      (inputnode, datasink, [('subject_id', 'subject_id'),
                                             ('session_id', 'session_id'),
                                             ('run_id', 'run_id')]),
                      (fwhm, datasink, [(('fwhm', fwhm_dict), 'fwhm')]),
                      (measures, datasink, [('summary', 'summary'),
                                            ('spacing', 'spacing'),
                                            ('size', 'size'), ('icvs', 'icvs'),
                                            ('rpve', 'rpve'), ('inu', 'inu'),
                                            ('snr', 'snr'), ('cnr', 'cnr'),
                                            ('fber', 'fber'), ('efc', 'efc'),
                                            ('qi1', 'qi1'), ('qi2', 'qi2'),
                                            ('cjv', 'cjv'),
                                            ('wm2max', 'wm2max')]),
                      (out_name, datasink, [('out_file', 'out_file')]),
                      (meta, datasink, [('out_dict', 'metadata')]),
                      (datasink, outputnode, [('out_file', 'out_file')])])
    return workflow
def anat_qc_workflow_dhcp1(name='MRIQC_Anat', settings=None):
    """
    One-subject-one-session-one-run pipeline to extract the NR-IQMs from
    anatomical images
    """
    if settings is None:
        settings = {}

    workflow = pe.Workflow(name=name)
    deriv_dir = op.abspath('./derivatives')
    if 'work_dir' in settings.keys():
        deriv_dir = op.abspath(op.join(settings['work_dir'], 'derivatives'))

    if not op.exists(deriv_dir):
        os.makedirs(deriv_dir)
    # Define workflow, inputs and outputs
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bids_root', 'subject_id', 'session_id', 'run_id', 'reorient',
        'bias_corrected', 'bias', 'tissue_segmentation', 'in_pvms'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_json']),
                         name='outputnode')

    # 0. Get data
    # datasource.inputs.data_type = 'anat'

    # 1a. Reorient anatomical image
    # arw = mri_reorient_wf()
    # 1b. Estimate bias
    # n4itk = pe.Node(ants.N4BiasFieldCorrection(dimension=3, save_bias=True), name='Bias')
    # 2. Skull-stripping (afni)
    # asw = skullstrip_wf()
    # mask = pe.Node(fsl.ApplyMask(), name='MaskAnatomical')
    # 3. Head mask (including nasion-to-cerebellum mask)
    # hmsk = headmsk_wf()
    # 4. Air mask (with and without artifacts)
    # amw = airmsk_wf(save_memory=settings.get('save_memory', False),
    #                 ants_settings=settings.get('ants_settings', None))

    # Brain tissue segmentation
    # segment = pe.Node(fsl.FAST(
    #     img_type=1, segments=True, out_basename='segment'), name='segmentation')

    # AFNI check smoothing
    fwhm = pe.Node(afp.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), 'measures')

    # Plot mosaic
    plot = pe.Node(PlotMosaic(), name='plot_mosaic')
    merg = pe.Node(niu.Merge(3), name='plot_metadata')

    # Connect all nodes
    workflow.connect([
        (inputnode, fwhm, [('reorient', 'in_file')]),
        (inputnode, fwhm, [('bias_corrected', 'mask')]),

        # (amw, measures, [('outputnode.out_file', 'air_msk')]),
        # (amw, measures, [('outputnode.artifact_msk', 'artifact_msk')]),
        (inputnode, measures, [('reorient', 'in_file'),
                               ('bias_corrected', 'in_noinu'),
                               ('bias', 'in_bias'),
                               ('tissue_segmentation', 'in_segm'),
                               ('in_pvms', 'in_pvms')]),
        (inputnode, plot, [('reorient', 'in_file')]),
        (inputnode, plot, [('subject_id', 'subject')]),
        (inputnode, merg, [('session_id', 'in1'), ('run_id', 'in2')]),
        (merg, plot, [('out', 'metadata')])
    ])

    if settings.get('mask_mosaic', False):
        workflow.connect(inputnode, 'bias_corrected', plot, 'in_mask')

    # Save mosaic to well-formed path
    mvplot = pe.Node(niu.Rename(
        format_string='anatomical_%(subject_id)s_%(session_id)s_%(run_id)s',
        keep_ext=True),
                     name='rename_plot')
    dsplot = pe.Node(nio.DataSink(base_directory=settings['work_dir'],
                                  parameterization=False),
                     name='ds_plot')
    workflow.connect([(inputnode, mvplot, [('subject_id', 'subject_id'),
                                           ('session_id', 'session_id'),
                                           ('run_id', 'run_id')]),
                      (plot, mvplot, [('out_file', 'in_file')]),
                      (mvplot, dsplot, [('out_file', '@mosaic')])])

    # Format name
    out_name = pe.Node(niu.Function(
        input_names=['subid', 'sesid', 'runid', 'prefix', 'out_path'],
        output_names=['out_file'],
        function=bids_path),
                       name='FormatName')
    out_name.inputs.out_path = deriv_dir
    out_name.inputs.prefix = 'anat'

    # Save to JSON file
    datasink = pe.Node(nio.JSONFileSink(), name='datasink')
    datasink.inputs.qc_type = 'anat'

    workflow.connect([
        (inputnode, out_name, [('subject_id', 'subid'),
                               ('session_id', 'sesid'), ('run_id', 'runid')]),
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('run_id', 'run_id')]),
        (plot, datasink, [('out_file', 'mosaic_file')]),
        (fwhm, datasink, [(('fwhm', fwhm_dict), 'fwhm')]),
        (
            measures,
            datasink,
            [
                ('summary', 'summary'),
                ('spacing', 'spacing'),
                ('size', 'size'),
                ('icvs', 'icvs'),
                ('rpve', 'rpve'),
                ('inu', 'inu'),
                ('snr', 'snr'),
                ('cnr', 'cnr'),
                # ('fber', 'fber'),
                ('efc', 'efc'),
                # ('qi1', 'qi1'),
                # ('qi2', 'qi2'),
                ('cjv', 'cjv')
            ]),
        (out_name, datasink, [('out_file', 'out_file')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])
    return workflow
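
The anatomical workflow can be driven the same way; the detail specific to this example is settings['force_run'], which anat_qc_workflow copies onto the JSONFileSink's _always_run attribute so the IQM JSON is rewritten even when nothing upstream changed. A short, hedged sketch with placeholder paths:

settings = {'output_dir': '/tmp/mriqc_out',       # placeholder path
            'report_dir': '/tmp/mriqc_reports',   # placeholder path
            'testing': False,
            'force_run': True}                    # make the JSON sink always re-run
anat_wf = anat_qc_workflow(name='MRIQC_Anat', settings=settings)
anat_wf.inputs.inputnode.bids_dir = '/data/bids'  # placeholder BIDS root
anat_wf.inputs.inputnode.subject_id = 'sub-01'
anat_wf.inputs.inputnode.session_id = 'ses-01'
anat_wf.inputs.inputnode.run_id = 'run-01'
# anat_wf.run()  # requires AFNI, FSL, ANTs and FreeSurfer tools to be installed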