def avg_smoothness(inp_file):
    from nipype.interfaces import afni
    fwhm = afni.FWHMx()

    # use automask so it's consistent for raw as well as preprocessed data
    fwhm.inputs.automask = True

    # detrend before estimating smoothness (3dFWHMx -detrend)
    fwhm.inputs.detrend = True

    fwhm.inputs.in_file = inp_file
    fwhm.inputs.acf = True
    fwhm_run = fwhm.run()
    # With -acf, 3dFWHMx estimates the ACF parameters (a, b, c) and appends a
    # combined FWHM estimate in the last position; see
    # https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dFWHMx.html
    res = fwhm_run.outputs.acf_param[3]
    return res
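
# Usage sketch for avg_smoothness (the input path below is hypothetical; any
# NIfTI volume readable by AFNI should work):
if __name__ == '__main__':
    smoothness = avg_smoothness('sub-01_task-rest_bold.nii.gz')
    print(f'Combined ACF FWHM estimate: {smoothness} mm')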
Example #2
def do_Compute_FWHM(infile, outfile):
    '''
    Parameters
    ----------
    infile : str
        full path of the input image
    outfile : str
        full path prefix for the output files.

    Returns
    -------
    file containing the FWHM estimates of the input.
    '''
    from nipype.interfaces import afni
    print(f'computing fwhm for {infile}\n')
    fwhm = afni.FWHMx()
    fwhm.inputs.in_file = infile
    fwhm.inputs.out_file = outfile + '.out'  # summary FWHM estimates
    fwhm.inputs.out_subbricks = outfile      # per-sub-brick FWHM estimates
    fwhm.run()
    print(f'computation of fwhm is done and saved as {outfile}\n')
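
# Usage sketch (paths are hypothetical; assumes the summary file written by
# 3dFWHMx holds plain whitespace-separated numbers):
if __name__ == '__main__':
    import numpy as np
    do_Compute_FWHM('sub-01_bold.nii.gz', 'sub-01_fwhm')
    estimates = np.loadtxt('sub-01_fwhm.out')  # FWHM along x, y, z
    print(estimates)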
Example #3
def compute_iqms(settings, name='ComputeIQMs'):
    """Workflow that actually computes the IQMs"""
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'run_id', 'orig', 'brainmask', 'airmask',
        'artmask', 'headmask', 'segmentation', 'inu_corrected', 'in_inu',
        'pvms', 'metadata', 'reverse_transforms', 'reverse_invert_flags'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'out_noisefit']),
        name='outputnode')

    deriv_dir = check_folder(
        op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # AFNI check smoothing
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(erodemsk=settings.get('testing', False)),
                     name='ComputeQI2')

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), 'measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           interpolation='NearestNeighbor'),
                      iterfield=['input_image'],
                      name='MNItpms2t1')
    invt.inputs.input_image = [
        op.join(get_mni_icbm152_nlin_asym_09c(), fname + '.nii.gz')
        for fname in ['1mm_tpm_csf', '1mm_tpm_gm', '1mm_tpm_wm']
    ]

    datasink = pe.Node(IQMFileSink(modality='T1w', out_dir=deriv_dir),
                       name='datasink')

    workflow.connect([(inputnode, datasink, [('subject_id', 'subject_id'),
                                             ('session_id', 'session_id'),
                                             ('run_id', 'run_id'),
                                             ('metadata', 'metadata')]),
                      (inputnode, getqi2, [('orig', 'in_file'),
                                           ('airmask', 'air_msk')]),
                      (inputnode, measures, [('inu_corrected', 'in_noinu'),
                                             ('in_inu', 'in_bias'),
                                             ('orig', 'in_file'),
                                             ('airmask', 'air_msk'),
                                             ('headmask', 'head_msk'),
                                             ('artmask', 'artifact_msk'),
                                             ('segmentation', 'in_segm'),
                                             ('pvms', 'in_pvms')]),
                      (inputnode, fwhm, [('orig', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, invt, [('orig', 'reference_image'),
                                         ('reverse_transforms', 'transforms'),
                                         ('reverse_invert_flags',
                                          'invert_transform_flags')]),
                      (invt, measures, [('output_image', 'mni_tpms')]),
                      (measures, datasink, [('out_qc', 'root')]),
                      (getqi2, datasink, [('qi2', 'qi_2')]),
                      (fwhm, datasink, [(('fwhm', fwhm_dict), 'root0')]),
                      (getqi2, outputnode, [('out_file', 'out_noisefit')]),
                      (datasink, outputnode, [('out_file', 'out_file')])])
    return workflow
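
# `fwhm_dict` above is a helper not shown in this example. A minimal sketch of
# the mapper it is assumed to be, turning the 3dFWHMx output tuple (with
# combine=True: x, y, z, combined) into named keys the datasink stores under
# 'root0':
def fwhm_dict(fwhm):
    fwhm = [float(f) for f in fwhm]
    return {'fwhm_x': fwhm[0], 'fwhm_y': fwhm[1],
            'fwhm_z': fwhm[2], 'fwhm_avg': fwhm[3]}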
Example #4
    def _run_interface(self, runtime):
        """
        Fit a GLM using AFNI's 3dREMLfit
        """
        import nibabel as nb
        import numpy as np
        import pandas as pd
        from nipype import logging
        from nipype.interfaces import afni

        logger = logging.getLogger("nipype.interface")

        spec = self.inputs.spec
        mat = pd.read_csv(self.inputs.design_matrix,
                          delimiter="\t",
                          index_col=0)
        contrasts = prepare_contrasts(spec['contrasts'], mat.columns.tolist())
        # The design matrix is indexed by acquisition time; with the first
        # frame at t=0, the second index value equals the repetition time
        t_r = mat.index[1]
        design_fname = op.join(runtime.cwd, "design.xmat.1D")
        stim_labels = self.get_stim_labels()
        fname_fmt = op.join(runtime.cwd, "{}_{}.nii.gz").format

        # Write AFNI style design matrix to file
        afni_design = get_afni_design_matrix(mat, contrasts, stim_labels, t_r)
        Path(design_fname).write_text(afni_design)

        img_path = self.inputs.bold_file
        img = nb.load(img_path)

        # Signal scaling occurs by default: the
        # nistats.first_level_model.FirstLevelModel class rewrites the default
        # signal_scaling argument of 0 to True and then sets the
        # scaling_axis attribute to 0
        signal_scaling = True
        scaling_axis = 0

        # Since this estimator uses a 4d image instead of a 2d matrix the
        # dataset axes must be mapped:
        axis_mapping = {
            # mean scaling each voxel with respect to time
            0: -1,
            # mean scaling each time point with respect to all voxels
            1: (0, 1, 2),
            # scaling with respect to voxels and time ("grand mean scaling")
            (0, 1): None,
        }
        if signal_scaling:
            img_mat = img.get_fdata()
            mean = img_mat.mean(axis=axis_mapping[scaling_axis], keepdims=True)
            if (mean == 0).any():
                logger.warning("Mean values of 0 observed. "
                               "The data have probably been centered. "
                               "Scaling might not work as expected.")
            mean = np.maximum(mean, 1)
            img_mat = 100 * (img_mat / mean - 1)
            img = type(img)(img_mat, img.affine)
            # write the scaled copy into the working directory, e.g.
            # "bold.nii.gz" -> "bold_scaled.nii.gz"
            img_path = "_scaled.".join(img_path.split("/")[-1].split(".", 1))
            img.to_filename(img_path)

        # Execute commands
        logger.info(
            f"3dREMLfit and 3dPval computation will be performed in: {runtime.cwd}\n"
        )

        # Define 3dREMLfit command
        remlfit = afni.Remlfit()
        remlfit.inputs.in_files = img_path
        remlfit.inputs.matrix = design_fname
        remlfit.inputs.out_file = "glt_results.nii.gz"
        remlfit.inputs.var_file = "glt_extra_variables.nii.gz"
        remlfit.inputs.wherr_file = "wherrorts.nii.gz"
        remlfit.inputs.errts_file = "errorts.nii.gz"
        remlfit.inputs.rbeta_file = "rbetas.nii.gz"
        remlfit.inputs.tout = True
        remlfit.inputs.rout = True
        remlfit.inputs.fout = True
        remlfit.inputs.verb = True
        remlfit.inputs.usetemp = True
        remlfit.inputs.goforit = True
        remlfit.inputs.mask = self.inputs.mask_file
        reml_res = remlfit.run()

        # Estimate the smoothness of the residual time series with 3dFWHMx
        fwhm = afni.FWHMx()
        fwhm.inputs.in_file = reml_res.outputs.wherr_file
        fwhm.inputs.out_file = fname_fmt("model", "residsmoothness").replace(
            '.nii.gz', '.tsv')
        fwhm_res = fwhm.run()
        fwhm_dat = pd.read_csv(fwhm_res.outputs.out_file,
                               delim_whitespace=True,
                               header=None)
        fwhm_dat.to_csv(fwhm_res.outputs.out_file,
                        index=None,
                        header=False,
                        sep='\t')

        out_maps = nb.load(reml_res.outputs.out_file)
        var_maps = nb.load(reml_res.outputs.var_file)
        beta_maps = nb.load(reml_res.outputs.rbeta_file)

        model_attr_extract = {
            'r_square': (out_maps, 0),
            'log_likelihood': (var_maps, 4),
            'a': (var_maps, 0),
            'b': (var_maps, 1),
            'lam': (var_maps, 2),
            'residwhstd': (var_maps, 3),
            'LjungBox': (var_maps, 5),
        }
        # Save model level maps
        model_maps = []
        model_metadata = []
        for attr, (imgs, idx) in model_attr_extract.items():
            model_metadata.append({'stat': attr, **spec['entities']})
            fname = fname_fmt('model', attr)
            extract_volume(imgs, idx, f"{attr} of model", fname)
            model_maps.append(fname)

        # separate dict for maps that don't need to be extracted
        model_attr = {
            'residtsnr': self.save_tsnr(runtime, beta_maps, var_maps),
            'residsmoothness': fwhm_res.outputs.out_file,
        }
        # Save error time series if people want it
        if self.errorts:
            model_attr["errorts"] = reml_res.outputs.wherr_file

        for attr, fname in model_attr.items():
            model_metadata.append({'stat': attr, **spec["entities"]})
            model_maps.append(fname)

        # get pvals and zscore buckets (niftis with heterogeneous intent codes)
        pval = Pval()
        pval.inputs.in_file = reml_res.outputs.out_file
        pval.inputs.out_file = "pval_maps.nii.gz"
        pvals = pval.run()

        zscore = Pval()
        zscore.inputs.in_file = reml_res.outputs.out_file
        zscore.inputs.out_file = "zscore_maps.nii.gz"
        zscore.inputs.zscore = True
        zscores = zscore.run()

        # create maps object
        maps = {
            "stat": out_maps,
            "z_score": nb.load(zscores.outputs.out_file),
            "p_value": nb.load(pvals.outputs.out_file),
        }
        maps["effect_size"] = maps["stat"]
        self.save_remlfit_results(maps, contrasts, runtime)
        self._results['model_maps'] = model_maps
        self._results['model_metadata'] = model_metadata
        #########################
        # Results are saved to self in save_remlfit_results, if the
        # memory saving is required it should be implemented there
        # nistats_flm.labels_.append(labels)
        # # We save memory if inspecting model details is not necessary
        # if nistats_flm.minimize_memory:
        #     for key in results:
        #         results[key] = SimpleRegressionResults(results[key])
        # nistats_flm.results_.append(results)

        return runtime
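
# `extract_volume` is defined elsewhere in the module. A minimal sketch of the
# behaviour assumed above (save one sub-brick of a 4D bucket as its own file),
# using only nibabel/numpy:
def extract_volume(img, index, description, out_path):
    import nibabel as nb
    import numpy as np
    data = np.asarray(img.dataobj[..., index])  # pull a single 3D sub-brick
    vol = nb.Nifti1Image(data, img.affine)
    vol.header['descrip'] = description.encode()[:79]  # 80-byte NIfTI field
    vol.to_filename(out_path)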
Example #5
def compute_iqms(settings, name='ComputeIQMs'):
    """Workflow that actually computes the IQMs"""
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'task_id', 'run_id', 'orig', 'epi_mean',
        'brainmask', 'hmc_epi', 'hmc_fd', 'in_tsnr', 'metadata']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
                         name='outputnode')

    deriv_dir = check_folder(op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True), name='ComputeDVARS')

    # AFNI quality measures
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers')
    quality = pe.Node(afni.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality')

    # FFT spikes finder
    spikes_fft = pe.Node(niu.Function(
        input_names=['in_file'], output_names=['n_spikes', 'out_spikes', 'out_fft'],
        function=slice_wise_fft), name='SpikesFinderFFT')

    measures = pe.Node(FunctionalQC(), name='measures')

    workflow.connect([
        (inputnode, dvnode, [('orig', 'in_file'),
                             ('brainmask', 'in_mask')]),
        (inputnode, measures, [('epi_mean', 'in_epi'),
                               ('brainmask', 'in_mask'),
                               ('hmc_epi', 'in_hmc'),
                               ('hmc_fd', 'in_fd'),
                               ('in_tsnr', 'in_tsnr')]),
        (inputnode, fwhm, [('epi_mean', 'in_file'),
                           ('brainmask', 'mask')]),
        (inputnode, spikes_fft, [('orig', 'in_file')]),
        (inputnode, quality, [('hmc_epi', 'in_file')]),
        (inputnode, outliers, [('hmc_epi', 'in_file'),
                               ('brainmask', 'mask')]),
        (dvnode, measures, [('out_all', 'in_dvars')]),
        (dvnode, outputnode, [('out_all', 'out_dvars')]),
        (outliers, outputnode, [('out_file', 'outliers')]),
        (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                  ('out_fft', 'out_fft')])
    ])

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(
        modality='bold', out_dir=deriv_dir), name='datasink')

    workflow.connect([
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('task_id', 'task_id'),
                               ('run_id', 'run_id'),
                               ('metadata', 'metadata')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (spikes_fft, datasink, [('n_spikes', 'spikes_num')]),
        (fwhm, datasink, [(('fwhm', fwhm_dict), 'root0')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])
    return workflow
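
# `_parse_tout` and `_parse_tqual` are helpers not shown in this example. A
# plausible minimal implementation, assuming each reduces the AFNI text output
# to a single scalar per run:
def _parse_tout(in_file):
    import numpy as np
    return np.loadtxt(in_file).mean()  # mean fraction of outlier voxels

def _parse_tqual(in_file):
    import numpy as np
    with open(in_file) as fin:
        lines = fin.readlines()
    # skip the '++' comment lines that 3dTqual prints
    return np.mean([float(line) for line in lines if not line.startswith('++')])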
Example #6
def compute_iqms(settings, name='ComputeIQMs'):
    """
    Workflow that actually computes the IQMs

    .. workflow::

      from mriqc.workflows.functional import compute_iqms
      wf = compute_iqms(settings={'output_dir': 'out'})


    """
    from mriqc.workflows.utils import _tofloat

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'task_id', 'acq_id', 'rec_id', 'run_id',
        'orig', 'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd', 'fd_thres',
        'in_tsnr', 'metadata'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
                         name='outputnode')
    # Set the framewise displacement (FD) threshold
    inputnode.inputs.fd_thres = settings.get('fd_thres', 0.2)
    deriv_dir = check_folder(
        op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True),
                     name='ComputeDVARS')
    dvnode.interface.estimated_memory_gb = biggest_file_gb * 3

    # AFNI quality measures
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True,
                                         out_file='outliers.out'),
                       name='outliers')
    outliers.interface.estimated_memory_gb = biggest_file_gb * 2.5
    quality = pe.Node(afni.QualityIndex(automask=True,
                                        out_file='quality.out'),
                      name='quality')
    quality.interface.estimated_memory_gb = biggest_file_gb * 3

    measures = pe.Node(FunctionalQC(), name='measures')
    measures.interface.estimated_memory_gb = biggest_file_gb * 3

    workflow.connect([(inputnode, dvnode, [('hmc_epi', 'in_file'),
                                           ('brainmask', 'in_mask')]),
                      (inputnode, measures, [('epi_mean', 'in_epi'),
                                             ('brainmask', 'in_mask'),
                                             ('hmc_epi', 'in_hmc'),
                                             ('hmc_fd', 'in_fd'),
                                             ('fd_thres', 'fd_thres'),
                                             ('in_tsnr', 'in_tsnr')]),
                      (inputnode, fwhm, [('epi_mean', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, quality, [('hmc_epi', 'in_file')]),
                      (inputnode, outliers, [('hmc_epi', 'in_file'),
                                             ('brainmask', 'mask')]),
                      (dvnode, measures, [('out_all', 'in_dvars')]),
                      (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
                      (dvnode, outputnode, [('out_all', 'out_dvars')]),
                      (outliers, outputnode, [('out_file', 'outliers')])])

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(modality='bold', out_dir=deriv_dir),
                       name='datasink')

    workflow.connect([
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('task_id', 'task_id'), ('acq_id', 'acq_id'),
                               ('rec_id', 'rec_id'), ('run_id', 'run_id'),
                               ('metadata', 'metadata')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    if settings.get('fft_spikes_detector', False):
        # FFT spikes finder
        spikes_fft = pe.Node(niu.Function(
            input_names=['in_file'],
            output_names=['n_spikes', 'out_spikes', 'out_fft'],
            function=slice_wise_fft),
                             name='SpikesFinderFFT')

        workflow.connect([
            (inputnode, spikes_fft, [('orig', 'in_file')]),
            (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                      ('out_fft', 'out_fft')]),
            (spikes_fft, datasink, [('n_spikes', 'spikes_num')])
        ])

    return workflow
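
# `_tofloat` is imported from mriqc.workflows.utils above. A minimal sketch of
# what such a casting helper needs to do here (3dFWHMx reports its estimates
# as a tuple that FunctionalQC expects as plain floats):
def _tofloat(in_value):
    if isinstance(in_value, (list, tuple)):
        return [float(v) for v in in_value]
    return float(in_value)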
Example #7
def first_level_wf(pipeline, subject_id, task_id, output_dir):
    """
    First level workflow
    """
    workflow = pe.Workflow(name='_'.join((pipeline, subject_id, task_id)))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_preproc', 'contrasts', 'confounds', 'brainmask', 'events_file'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['sigma_pre', 'sigma_post', 'out_stats']),
        name='outputnode')

    conf2movpar = pe.Node(niu.Function(function=_confounds2movpar),
                          name='conf2movpar')
    masker = pe.Node(fsl.ApplyMask(), name='masker')
    bim = pe.Node(afni.BlurInMask(fwhm=5.0, outputtype='NIFTI_GZ'),
                  name='bim',
                  mem_gb=20)

    ev = pe.Node(EventsFilesForTask(task=task_id), name='events')

    l1 = pe.Node(SpecifyModel(
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100,
        parameter_source='FSL',
    ),
                 name='l1')

    l1model = pe.Node(fsl.Level1Design(interscan_interval=2,
                                       bases={'dgamma': {
                                           'derivs': True
                                       }},
                                       model_serial_correlations=True),
                      name='l1design')

    l1featmodel = pe.Node(fsl.FEATModel(), name='l1model')
    l1estimate = pe.Node(fsl.FEAT(), name='l1estimate', mem_gb=40)

    pre_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                         detrend=True,
                                         args='-ShowMeClassicFWHM'),
                              name='smooth_pre_afni',
                              mem_gb=20)
    post_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                          detrend=True,
                                          args='-ShowMeClassicFWHM'),
                               name='smooth_post_afni',
                               mem_gb=20)

    pre_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_pre', mem_gb=20)
    post_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_post', mem_gb=20)

    def _resels(val):
        # FSL reports the resel size in voxels; its cube root approximates
        # the geometric-mean smoothness per axis
        return val**(1 / 3.)

    def _fwhm(fwhm):
        # average the per-axis FWHM estimates returned by 3dFWHMx
        from numpy import mean
        return float(mean(fwhm, dtype=float))

    workflow.connect([
        (inputnode, masker, [('bold_preproc', 'in_file'),
                             ('brainmask', 'mask_file')]),
        (inputnode, ev, [('events_file', 'in_file')]),
        (inputnode, l1model, [('contrasts', 'contrasts')]),
        (inputnode, conf2movpar, [('confounds', 'in_confounds')]),
        (inputnode, bim, [('brainmask', 'mask')]),
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (ev, l1, [('event_files', 'event_files')]),
        (conf2movpar, l1, [('out', 'realignment_parameters')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (ev, l1model, [('orthogonalization', 'orthogonalization')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
        # Smooth
        (inputnode, pre_smooth, [('bold_preproc', 'zstat_file'),
                                 ('brainmask', 'mask_file')]),
        (bim, post_smooth, [('out_file', 'zstat_file')]),
        (inputnode, post_smooth, [('brainmask', 'mask_file')]),
        (pre_smooth, outputnode, [(('resels', _resels), 'sigma_pre')]),
        (post_smooth, outputnode, [(('resels', _resels), 'sigma_post')]),

        # Smooth with AFNI
        (inputnode, pre_smooth_afni, [('bold_preproc', 'in_file'),
                                      ('brainmask', 'mask')]),
        (bim, post_smooth_afni, [('out_file', 'in_file')]),
        (inputnode, post_smooth_afni, [('brainmask', 'mask')]),
    ])

    # Writing outputs
    csv = pe.Node(AddCSVRow(in_file=str(output_dir / 'smoothness.csv')),
                  name='addcsv_%s_%s' % (subject_id, pipeline))
    csv.inputs.sub_id = subject_id
    csv.inputs.pipeline = pipeline

    # Datasinks
    ds_stats = pe.Node(niu.Function(function=_feat_stats), name='ds_stats')
    ds_stats.inputs.subject_id = subject_id
    ds_stats.inputs.task_id = task_id
    ds_stats.inputs.variant = pipeline
    ds_stats.inputs.out_path = output_dir
    setattr(ds_stats.interface, '_always_run', True)

    workflow.connect([
        (outputnode, csv, [('sigma_pre', 'smooth_pre'),
                           ('sigma_post', 'smooth_post')]),
        (pre_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_pre')]),
        (post_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_post')]),
        (l1estimate, ds_stats, [('feat_dir', 'feat_dir')]),
        (ds_stats, outputnode, [('out', 'out_stats')]),
    ])
    return workflow
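
# Usage sketch (identifiers, paths, and the contrast spec are hypothetical;
# the contrast tuple follows the standard nipype/FSL convention):
if __name__ == '__main__':
    from pathlib import Path
    wf = first_level_wf('fmriprep', 'sub-01', 'stopsignal', Path('out'))
    wf.inputs.inputnode.bold_preproc = 'sub-01_bold_preproc.nii.gz'
    wf.inputs.inputnode.brainmask = 'sub-01_brainmask.nii.gz'
    wf.inputs.inputnode.events_file = 'sub-01_events.tsv'
    wf.inputs.inputnode.confounds = 'sub-01_confounds.tsv'
    wf.inputs.inputnode.contrasts = [('go_vs_stop', 'T', ['go', 'stop'], [1, -1])]
    wf.run()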