Example #1
def convert_rawdata(base_directory, input_dir, out_prefix):
    os.environ['UNPACK_MGH_DTI'] = '0'
    file_list = os.listdir(input_dir)

    # If the RAWDATA folder contains one (and only one) gzipped NIfTI file -> copy it
    first_file = os.path.join(input_dir, file_list[0])
    if len(file_list) == 1 and first_file.endswith('.nii.gz'):
        copyfile(first_file,
                 os.path.join(base_directory, 'NIFTI',
                              out_prefix + '.nii.gz'), False, False,
                 'content')  # intelligent copy looking at input's content
    else:
        mem = Memory(base_dir=os.path.join(base_directory, 'NIPYPE'))
        dcm2niix = mem.cache(Dcm2niix)
        res = dcm2niix(source_dir=str(input_dir),
                       output_dir=os.path.join(base_directory, 'NIFTI'),
                       out_filename=out_prefix)
        if len(res.outputs.get()) == 0:
            return False

    return True
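
As a rough usage sketch, the helper above could be driven like this (the directory layout and prefix are hypothetical):

if not convert_rawdata('/data/study/sub01', '/data/study/sub01/RAWDATA', 'sub01_dwi'):
    print('conversion failed for sub01')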
Example #2
def save_tmaps(copes_loc, mask_loc, working_dir, permutations, rerun=False):
    task_dir = path.dirname(copes_loc)
    contrast_name = path.basename(copes_loc).split('_cope')[0].replace(
        '.nii.gz', '')
    contrast_working_dir = path.join(working_dir, path.basename(copes_loc))
    tfile_loc = path.join(task_dir, "%s_raw_tfile.nii.gz" % contrast_name)
    tfile_corrected_loc = path.join(
        task_dir, "%s_corrected_tfile.nii.gz" % contrast_name)
    makedirs(contrast_working_dir, exist_ok=True)
    # perform permutation test to assess significance
    if not path.exists(tfile_loc) or rerun:
        mem = Memory(base_dir=contrast_working_dir)
        randomise = mem.cache(fsl.Randomise)
        randomise_results = randomise(
            in_file=copes_loc,
            mask=mask_loc,
            one_sample_group_mean=True,
            tfce=True,  # threshold-free cluster enhancement
            vox_p_values=True,
            var_smooth=10,
            num_perm=permutations)
        # save results
        raw_tfile = randomise_results.outputs.tstat_files[0]
        corrected_tfile = randomise_results.outputs.t_corrected_p_files[0]
        shutil.move(raw_tfile, tfile_loc)
        shutil.move(corrected_tfile, tfile_corrected_loc)
        shutil.rmtree(contrast_working_dir)
    return tfile_loc, tfile_corrected_loc
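
A hedged invocation sketch for the function above (all paths are made up; note the copes filename must contain '_cope' for the contrast name to be parsed):

tfile_loc, tfile_corrected_loc = save_tmaps(
    copes_loc='/analysis/stop_task/go_cope.nii.gz',
    mask_loc='/analysis/stop_task/mask.nii.gz',
    working_dir='/tmp/randomise_work',
    permutations=1000)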
Example #3
def anat_preproc(file_to_register, register_to, warp_back, pipeline_dir):
    # DATA CONFIGURATION. FOLLOWING OPENFMRI STANDARD.
    save_to = os.path.join(pipeline_dir,
                           file_to_register.split('/')[-1].split('.')[0])
    # Run pipeline imperatively with caching (without workflow object)
    mem = Memory(pipeline_dir)
    antsreg = mem.cache(Registration)
    transform = mem.cache(ApplyTransforms)
    save_list = []
    # nodes manual parameter configuration and run
    reg = antsreg(args='--float',
                  collapse_output_transforms=True,
                  moving_image=file_to_register,
                  fixed_image=register_to,
                  initial_moving_transform_com=True,
                  num_threads=n_proc,  # n_proc: thread count assumed defined at module level
                  output_inverse_warped_image=True,
                  output_warped_image=True,
                  sigma_units=['vox']*3,
                  transforms=['Rigid', 'Affine', 'SyN'],
                  terminal_output='file',
                  winsorize_lower_quantile=0.005,
                  winsorize_upper_quantile=0.995,
                  convergence_threshold=[1e-06],
                  convergence_window_size=[10],
                  metric=['MI', 'MI', 'CC'],
                  metric_weight=[1.0]*3,
                  number_of_iterations=[[1000, 500, 250, 100],
                                        [1000, 500, 250, 100],
                                        [100, 70, 50, 20]],
                  radius_or_number_of_bins=[32, 32, 4],
                  sampling_percentage=[0.25, 0.25, 1],
                  sampling_strategy=['Regular',
                                     'Regular',
                                     'None'],
                  shrink_factors=[[8, 4, 2, 1]]*3,
                  smoothing_sigmas=[[3, 2, 1, 0]]*3,
                  transform_parameters=[(0.1,),
                                        (0.1,),
                                        (0.1, 3.0, 0.0)],
                  use_histogram_matching=True,
                  write_composite_transform=True)
    save_list.append([reg.outputs.composite_transform, save_to])
    save_list.append([reg.outputs.warped_image, save_to])
    save_list.append([reg.outputs.inverse_composite_transform, save_to])
    save_list.append([reg.outputs.inverse_warped_image, save_to])
    transformed = transform(args='--float',
                            input_image_type=3,
                            interpolation='NearestNeighbor',
                            invert_transform_flags=[False],
                            num_threads=n_proc,
                            reference_image=file_to_register,
                            terminal_output='file',
                            transforms=reg.outputs.inverse_composite_transform,
                            input_image=warp_back)
    save_list.append([transformed.outputs.output_image, save_to])
    return save_list
Example #4
def _reorient(unifized_anat_file,
              unbiased_modality_file,
              write_dir,
              modality_name='modality',
              terminal_output='allatonce',
              caching=False,
              verbose=True,
              environ=None):
    """
    Reorientation of the subject's anatomical image to the modality.

    Parameters
    ----------
    modality_name : str, optional
        Name to be used in the transform filename
    caching : bool, optional
        Whether or not to use caching.
    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    Returns
    -------
    2-tuple of str : Path to the reoriented anatomical image and to the
        transform from anat to modality.
    """
    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        catmatvec = memory.cache(afni.CatMatvec)
    else:
        catmatvec = afni.CatMatvec().run

    registered_anat_oblique_file, mat_file =\
        _warp(unifized_anat_file, unbiased_modality_file, write_dir,
              caching=caching, verbose=verbose,
              terminal_output=terminal_output,
              environ=environ)
    transform_file = fname_presuffix(
        registered_anat_oblique_file,
        suffix='_anat_to_{}.aff12.1D'.format(modality_name),
        use_ext=False)
    _ = catmatvec(in_file=[(mat_file, 'ONELINE')],
                  oneline=True,
                  out_file=transform_file,
                  environ=environ)

    # Remove the intermediate outputs
    if not caching:
        os.remove(mat_file)

    return registered_anat_oblique_file, transform_file
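
For orientation, a minimal call might look like the following (file names are illustrative; _warp and fname_presuffix come from the same package as the function itself):

reoriented_anat, anat_to_func_transform = _reorient(
    '/tmp/anat_unifized.nii.gz',
    '/tmp/mean_func_n4.nii.gz',
    '/tmp',
    modality_name='func',
    caching=True)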
Example #5
def afni_unifize(in_file,
                 write_dir=None,
                 out_file=None,
                 caching=False,
                 terminal_output='allatonce',
                 verbose=True,
                 environ=None,
                 copy_geometry=False,
                 **unifize_kwargs):
    if write_dir is None:
        write_dir = os.path.dirname(in_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        copy_geom = memory.cache(fsl.CopyGeom)
        unifize = memory.cache(afni.Unifize)
        copy = memory.cache(afni.Copy)
        unifize.interface().set_default_terminal_output(terminal_output)
        copy.interface().set_default_terminal_output(terminal_output)
    else:
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        copy = afni.Copy(terminal_output=terminal_output).run

    if out_file is None:
        out_file = fname_presuffix(in_file,
                                   suffix='_unifized',
                                   newpath=write_dir)
    if copy_geometry:
        unifized_file = fname_presuffix(in_file,
                                        suffix='_unifized_rough_geom',
                                        newpath=write_dir)
    else:
        unifized_file = out_file

    out_unifize = unifize(in_file=in_file,
                          out_file=unifized_file,
                          environ=environ,
                          quiet=not verbose,
                          **unifize_kwargs)

    if copy_geometry:
        out_copy = copy(in_file=out_unifize.outputs.out_file,
                        out_file=out_file,
                        environ=environ)
        out_copy_geom = copy_geom(dest_file=out_copy.outputs.out_file,
                                  in_file=in_file)
    return out_file
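
Standalone, the helper above might be called as follows (the input path is hypothetical; extra keyword arguments are forwarded to AFNI's 3dUnifize through **unifize_kwargs):

unifized_anat = afni_unifize('/data/sub01/anat.nii.gz',
                             caching=True,
                             urad=18.3)  # urad is passed through to 3dUnifize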
Example #6
def convert_rawdata(base_directory, input_dir, out_prefix):
    os.environ['UNPACK_MGH_DTI'] = '0'
    file_list = os.listdir(input_dir)

    # If the RAWDATA folder contains one (and only one) gzipped NIfTI file -> copy it
    first_file = os.path.join(input_dir, file_list[0])
    if len(file_list) == 1 and first_file.endswith('.nii.gz'):
        copyfile(first_file, os.path.join(base_directory, 'NIFTI', out_prefix + '.nii.gz'), False, False, 'content')  # intelligent copy looking at input's content
    else:
        mem = Memory(base_dir=os.path.join(base_directory, 'NIPYPE'))
        mri_convert = mem.cache(fs.MRIConvert)
        res = mri_convert(in_file=first_file, out_file=os.path.join(base_directory, 'NIFTI', out_prefix + '.nii.gz'))
        if len(res.outputs.get()) == 0:
            return False

    return True
Example #7
def main():
    print('Running subjects:', str(SUBJECTS))
    if not os.path.isdir(MEM_DIR):
        os.mkdir(MEM_DIR)
    mem = Memory(base_dir=MEM_DIR)
    layout = BIDSLayout(BIDS_DIR)
    # func_files[subject_index][run_index]
    if num_runs > 1:
        func_files = [[
            layout.get(type='bold',
                       task=task,
                       run=i + 1,
                       subject=subj,
                       extensions='nii.gz')[0] for i in range(num_runs)
        ] for subj in SUBJECTS]
    else:
        func_files = [
            layout.get(type='bold',
                       task=task,
                       subject=subj,
                       extensions='nii.gz') for subj in SUBJECTS
        ]
    events = get_events(func_files)
    confounds = get_confounds(func_files)
    info = get_info(events, confounds)
    specify_model_results = specify_model(layout, func_files, info)
    level1design_results = lv1_design(mem, layout, func_files,
                                      specify_model_results)
    modelgen_results = feat_model(mem, level1design_results)
    mask_results = masking(mem, func_files)
    film_gls(mem, mask_results, modelgen_results)
Example #8
def ants_n4(in_file,
            write_dir=None,
            caching=False,
            terminal_output='allatonce',
            environ=None,
            copy_geometry=True):
    if write_dir is None:
        write_dir = os.path.dirname(in_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        copy = memory.cache(afni.Copy)
        copy_geom = memory.cache(fsl.CopyGeom)
        bias_correct.interface().set_default_terminal_output(terminal_output)
        copy.interface().set_default_terminal_output(terminal_output)
    else:
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        copy = afni.Copy(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run

    unbiased_file = fname_presuffix(in_file, suffix='_n4', newpath=write_dir)
    if copy_geometry:
        output_image = fname_presuffix(in_file,
                                       suffix='_n4_rough_geom',
                                       newpath=write_dir)
    else:
        output_image = unbiased_file

    out_bias_correct = bias_correct(
        input_image=in_file,
        shrink_factor=_compute_n4_max_shrink(in_file),
        output_image=output_image)

    if copy_geometry:
        out_copy = copy(in_file=out_bias_correct.outputs.output_image,
                        out_file=unbiased_file,
                        environ=environ)
        out_copy_geom = copy_geom(dest_file=out_copy.outputs.out_file,
                                  in_file=in_file)
    return unbiased_file
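
A possible call (hypothetical path; the shrink factor is derived automatically by _compute_n4_max_shrink):

unbiased_anat = ants_n4('/data/sub01/anat.nii.gz', caching=True)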
Example #9
def _warp(to_warp_file,
          reference_file,
          write_dir=None,
          caching=False,
          terminal_output='allatonce',
          verbose=True,
          environ=None):
    if write_dir is None:
        write_dir = os.path.dirname(to_warp_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        warp = memory.cache(afni.Warp)
    else:
        warp = afni.Warp().run

    out_warp = warp(in_file=to_warp_file,
                    oblique_parent=reference_file,
                    interp='quintic',
                    gridset=reference_file,
                    out_file=fname_presuffix(to_warp_file,
                                             suffix='_warped',
                                             newpath=write_dir),
                    verbose=True,
                    save_warp=True,
                    environ=environ)

    # 3dWarp doesn't put the obliquity in the header, so do it manually
    warped_oblique_file = fix_obliquity(out_warp.outputs.out_file,
                                        reference_file,
                                        verbose=verbose,
                                        caching=caching,
                                        caching_dir=write_dir,
                                        environ=environ)

    return warped_oblique_file, out_warp.outputs.warp_file
Example #10
def test_caching():
    temp_dir = mkdtemp(prefix='test_memory_')
    old_rerun = config.get('execution', 'stop_on_first_rerun')
    try:
        # Prevent rerun to check that evaluation is computed only once
        config.set('execution', 'stop_on_first_rerun', 'true')
        mem = Memory(temp_dir)
        first_nb_run = nb_runs
        results = mem.cache(SideEffectInterface)(input1=2, input2=1)
        assert_equal(nb_runs, first_nb_run + 1)
        assert_equal(results.outputs.output1, [1, 2])
        results = mem.cache(SideEffectInterface)(input1=2, input2=1)
        # Check that the node hasn't been rerun
        assert_equal(nb_runs, first_nb_run + 1)
        assert_equal(results.outputs.output1, [1, 2])
        results = mem.cache(SideEffectInterface)(input1=1, input2=1)
        # Check that the node has been rerun
        assert_equal(nb_runs, first_nb_run + 2)
        assert_equal(results.outputs.output1, [1, 1])
    finally:
        rmtree(temp_dir)
        config.set('execution', 'stop_on_first_rerun', old_rerun)
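
The idiom this test exercises is nipype's general caching pattern: Memory.cache wraps an interface class, runs it on the first call, and replays the stored result when called again with identical inputs. A minimal sketch with a real interface (paths hypothetical):

import os
from nipype.caching import Memory
from nipype.interfaces import fsl

os.makedirs('/tmp/nipype_cache', exist_ok=True)
mem = Memory('/tmp/nipype_cache')
bet = mem.cache(fsl.BET)
res1 = bet(in_file='/tmp/T1.nii.gz', frac=0.5)  # executed
res2 = bet(in_file='/tmp/T1.nii.gz', frac=0.5)  # identical inputs: replayed from cache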
Example #12
def _delete_orientation(in_file,
                        write_dir=None,
                        min_zoom=.1,
                        caching=False,
                        verbose=True):
    if write_dir is None:
        write_dir = os.path.dirname(in_file)

    if verbose:
        terminal_output = 'stream'
    else:
        terminal_output = 'none'

    if caching:
        memory = Memory(write_dir)
        copy = memory.cache(afni.Copy)
        refit = memory.cache(afni.Refit)
        center_mass = memory.cache(afni.CenterMass)
        for step in [copy, refit]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        copy = afni.Copy(terminal_output=terminal_output).run
        refit = afni.Refit(terminal_output=terminal_output).run
        center_mass = afni.CenterMass().run

    out_copy = copy(in_file=in_file,
                    out_file=fname_presuffix(in_file, newpath=write_dir))

    zooms = nibabel.load(in_file).header.get_zooms()[:3]
    out_refit = refit(in_file=out_copy.outputs.out_file,
                      xyzscale=min_zoom / min(zooms))
    out_center_mass = center_mass(in_file=out_refit.outputs.out_file,
                                  cm_file=fname_presuffix(in_file,
                                                          suffix='.txt',
                                                          use_ext=False,
                                                          newpath=write_dir),
                                  set_cm=(0, 0, 0))
    return out_center_mass.outputs.out_file
Example #13
def _slice_time(func_file,
                t_r,
                write_dir,
                caching=False,
                terminal_output='allatonce',
                environ=None):
    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        tshift.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run

    out_tshift = tshift(in_file=func_file,
                        out_file=fname_presuffix(func_file,
                                                 suffix='_tshifted',
                                                 newpath=write_dir),
                        tpattern='altplus',
                        tr=str(t_r),
                        environ=environ)
    return out_tshift.outputs.out_file
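
A possible call (hypothetical EPI file and repetition time):

tshifted_func = _slice_time('/data/sub01/func.nii.gz',
                            t_r=2.0,
                            write_dir='/tmp',
                            caching=True)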
Example #14
def dcm2nii(source_names,
            terminal_output="allatonce",
            gzip_output=False,
            anonymize=True,
            output_dir=None,
            caching=True,
            **other_dcm2nii_kwargs):
    """
    Converts DICOM (dcm) images to Nifti.

    """

    # all input files must be DICOM
    if is_niimg(source_names):
        return source_names, None

    for source_name in [source_names] if isinstance(
            source_names, _basestring) else source_names:
        if not isdicom(source_name):
            return source_names, None  # not (all) DICOM; nothing to do

    # sanitize output dir
    if output_dir is None:
        output_dir = os.path.dirname(source_names if isinstance(
            source_names, _basestring) else source_names[0])
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # prepare node
    if caching:
        cache_dir = os.path.join(output_dir, "cache_dir")
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        dcm2nii_node = Memory(cache_dir).cache(Dcm2nii)
    else:
        dcm2nii_node = Dcm2nii().run

    # run node and collect results
    dcm2nii_result = dcm2nii_node(source_names=source_names,
                                  terminal_output=terminal_output,
                                  anonymize=anonymize,
                                  gzip_output=gzip_output,
                                  output_dir=output_dir,
                                  **other_dcm2nii_kwargs)

    return dcm2nii_result.outputs.converted_files, dcm2nii_result
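
For example (hypothetical DICOM folder; glob would need to be imported alongside the snippet's other dependencies):

import glob

nifti_files, result = dcm2nii(sorted(glob.glob('/data/sub01/dicom/*.dcm')),
                              output_dir='/data/sub01/nifti',
                              gzip_output=True)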
Example #15
import nipype.interfaces.spm as spm
from nipype.caching import Memory

from procasl import preprocessing, quantification

# Create a memory context
mem = Memory('/tmp/no_workflow')

# Give data location
func_file = '/tmp/func.nii'
anat_file = '/tmp/anat.nii'

# Set spm paths
matlab_cmd = '/i2bm/local/spm8-standalone/run_spm8.sh ' +\
    '/i2bm/local/spm8-standalone/mcr/v713 script'
spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
paths = ['/i2bm/local/spm8-standalone/spm8_mcr/spm8/']  # TODO: check needed

# Get Tag/Control sequence
get_tag_ctl = mem.cache(preprocessing.GetTagControl)
out_get_tag_ctl = get_tag_ctl(in_file=func_file)

# Rescale
rescale = mem.cache(preprocessing.Rescale)
out_rescale = rescale(in_file=out_get_tag_ctl.outputs.tag_ctl_file,
                      ss_tr=35.4, t_i_1=800., t_i_2=1800.)

# Realign to first scan
realign = mem.cache(preprocessing.Realign)
out_realign = realign(
    in_file=out_rescale.outputs.rescaled_file,
Example #16
import numpy as np
from cfutils import get_subjects, get_subject_data

X = get_subjects()
_, pdata = get_subject_data(X)
X = pdata.subject
y = pdata.lsas_pre - pdata.lsas_post

lgroup, _ = get_subject_data(X[y <= np.median(y)])
hgroup, _ = get_subject_data(X[y > np.median(y)])

import nipype.interfaces.spm as spm

from nipype.caching import Memory
import os

os.makedirs('/mindhive/scratch/satra/sadfigures/nipype_mem', exist_ok=True)
mem = Memory('/mindhive/scratch/satra/sadfigures')

designer = mem.cache(spm.OneSampleTTestDesign)
estimator = mem.cache(spm.EstimateModel)
cestimator = mem.cache(spm.EstimateContrast)

ldesres = designer(in_files=lgroup)
lestres = estimator(spm_mat_file=ldesres.outputs.spm_mat_file,
                    estimation_method={'Classical':None})
lcestres = cestimator(spm_mat_file=lestres.outputs.spm_mat_file,
                      beta_images=lestres.outputs.beta_images,
                      residual_image=lestres.outputs.residual_image,
                      group_contrast=True,
                      contrasts=[('LGroup', 'T', ['mean'], [1])])

hdesres = designer(in_files=hgroup)
Example #17
def run_suject_level1_glm(
        subject_data,
        readout_time=.01392,  # seconds
        tr=.72,
        dc=True,
        hrf_model="Canonical with Derivative",
        drift_model="Cosine",
        hfcut=100,
        regress_motion=True,
        slicer='ortho',
        cut_coords=None,
        threshold=3.,
        cluster_th=15,
        normalize=True,
        fwhm=0.,
        protocol="MOTOR",
        func_write_voxel_sizes=None,
        anat_write_voxel_sizes=None,
        **other_preproc_kwargs):
    """
    Function to do preproc + analysis for a single HCP subject (task fMRI)

    """

    add_regs_files = None
    n_motion_regressions = 6
    subject_data.n_sessions = 2

    subject_data.tmp_output_dir = os.path.join(subject_data.output_dir, "tmp")
    if not os.path.exists(subject_data.tmp_output_dir):
        os.makedirs(subject_data.tmp_output_dir)

    if not os.path.exists(subject_data.output_dir):
        os.makedirs(subject_data.output_dir)

    mem = Memory(os.path.join(subject_data.output_dir, "cache_dir"),
                 verbose=100)  # joblib-style Memory; it caches the plain function tortoise below

    # glob design files (.fsf)
    subject_data.design_files = [
        os.path.join(subject_data.data_dir,
                     ("MNINonLinear/Results/tfMRI_%s_%s/"
                      "tfMRI_%s_%s_hp200_s4_level1.fsf") %
                     (protocol, direction, protocol, direction))
        for direction in ['LR', 'RL']
    ]

    assert len(subject_data.design_files) == 2
    for df in subject_data.design_files:
        if not os.path.isfile(df):
            return

    if 0x0:  # 0x0 == 0, so this block is deliberately skipped
        subject_data = _do_fmri_distortion_correction(
            subject_data,
            dc=dc,
            fwhm=fwhm,
            readout_time=readout_time,
            **other_preproc_kwargs)

    # chronometry
    stats_start_time = pretty_time()

    # merged lists
    paradigms = []
    frametimes_list = []
    design_matrices = []
    # fmri_files = []
    n_scans = []
    for sess in range(subject_data.n_sessions):
        direction = ['LR', 'RL'][sess]
        # design file for this session
        design_file = subject_data.design_files[sess]
        if not os.path.isfile(design_file):
            print("Can't find design file %s; skipping subject %s" % (
                design_file, subject_data.subject_id))
            return

        # read the experimental setup
        print "Reading experimental setup from %s ..." % design_file
        fsl_condition_ids, timing_files, fsl_contrast_ids, contrast_values = \
            read_fsl_design_file(design_file)
        print "... done.\r\n"

        # fix timing filenames
        timing_files = [
            tf.replace("EVs", "tfMRI_%s_%s/EVs" % (protocol, direction))
            for tf in timing_files
        ]

        # make design matrix
        print "Constructing design matrix for direction %s ..." % direction
        _n_scans = nibabel.load(subject_data.func[sess]).shape[-1]
        n_scans.append(_n_scans)
        add_regs_file = (add_regs_files[sess]
                         if add_regs_files is not None else None)
        design_matrix, paradigm, frametimes = make_dmtx_from_timing_files(
            timing_files,
            fsl_condition_ids,
            n_scans=_n_scans,
            tr=tr,
            hrf_model=hrf_model,
            drift_model=drift_model,
            hfcut=hfcut,
            add_regs_file=add_regs_file,
            add_reg_names=[
                'Translation along x axis', 'Translation along y axis',
                'Translation along z axis', 'Rotation along x axis',
                'Rotation along y axis', 'Rotation along z axis',
                'Differential Translation along x axis',
                'Differential Translation along y axis',
                'Differential Translation along z axis',
                'Differential Rotation along x axis',
                'Differential Rotation along y axis',
                'Differential Rotation along z axis'
            ][:n_motion_regressions] if add_regs_files is not None else None,
        )

        print "... done."
        paradigms.append(paradigm)
        frametimes_list.append(frametimes)
        design_matrices.append(design_matrix)

        # convert contrasts to dict
        contrasts = dict((
            contrast_id,
            # append zeros to end of contrast to match design
            np.hstack((
                contrast_value,
                np.zeros(len(design_matrix.names) - len(contrast_value)))))
                         for contrast_id, contrast_value in zip(
                             fsl_contrast_ids, contrast_values))

        # more interesting contrasts
        if protocol == 'MOTOR':
            contrasts['RH-LH'] = contrasts['RH'] - contrasts['LH']
            contrasts['LH-RH'] = -contrasts['RH-LH']
            contrasts['RF-LF'] = contrasts['RF'] - contrasts['LF']
            contrasts['LF-RF'] = -contrasts['RF-LF']
            contrasts['H'] = contrasts['RH'] + contrasts['LH']
            contrasts['F'] = contrasts['RF'] + contrasts['LF']
            contrasts['H-F'] = contrasts['RH'] + contrasts['LH'] - (
                contrasts['RF'] - contrasts['LF'])
            contrasts['F-H'] = -contrasts['H-F']

        contrasts = dict((k, v) for k, v in contrasts.items() if "-" in k)

    # replicate contrasts across sessions
    contrasts = dict((cid, [cval] * 2) for cid, cval in contrasts.items())

    cache_dir = os.path.join(subject_data.output_dir, 'cache_dir')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    nipype_mem = NipypeMemory(base_dir=cache_dir)

    if 0x0:  # 0x0 == 0, so this block is deliberately skipped
        if np.sum(fwhm) > 0.:
            subject_data.func = nipype_mem.cache(spm.Smooth)(
                in_files=subject_data.func,
                fwhm=fwhm,
                ignore_exception=False,
            ).outputs.smoothed_files

    # fit GLM
    def tortoise(*args):
        print(args)
        print(
            'Fitting a "Fixed Effect" GLM for merging LR and RL '
            'phase-encoding directions for subject %s ...' %
            (subject_data.subject_id))
        fmri_glm = FMRILinearModel(
            subject_data.func,
            [design_matrix.matrix for design_matrix in design_matrices],
            mask='compute')
        fmri_glm.fit(do_scaling=True, model='ar1')
        print "... done.\r\n"

        # save computed mask
        mask_path = os.path.join(subject_data.output_dir, "mask.nii")
        print "Saving mask image to %s ..." % mask_path
        nibabel.save(fmri_glm.mask, mask_path)
        print "... done.\r\n"

        z_maps = {}
        effects_maps = {}
        map_dirs = {}
        try:
            for contrast_id, contrast_val in contrasts.items():
                print("\tcontrast id: %s" % contrast_id)
                z_map, eff_map = fmri_glm.contrast(contrast_val,
                                                   con_id=contrast_id,
                                                   output_z=True,
                                                   output_effects=True)

                # store stat maps to disk
                for map_type, out_map in zip(['z', 'effects'],
                                             [z_map, eff_map]):
                    map_dir = os.path.join(subject_data.output_dir,
                                           '%s_maps' % map_type)
                    map_dirs[map_type] = map_dir
                    if not os.path.exists(map_dir):
                        os.makedirs(map_dir)
                    map_path = os.path.join(
                        map_dir, '%s_%s.nii' % (map_type, contrast_id))
                    print "\t\tWriting %s ..." % map_path

                    nibabel.save(out_map, map_path)

                    # collect zmaps for contrasts we're interested in
                    if map_type == 'z':
                        z_maps[contrast_id] = map_path

                    if map_type == 'effects':
                        effects_maps[contrast_id] = map_path

            return effects_maps, z_maps, mask_path, map_dirs
        except Exception:
            return None

    # compute native-space maps and mask
    stuff = mem.cache(tortoise)(subject_data.func, subject_data.anat)
    if stuff is None:
        return None
    effects_maps, z_maps, mask_path, map_dirs = stuff

    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.items())
    import json
    with open(os.path.join(subject_data.tmp_output_dir,
                           "contrasts.json"), "w") as f:
        json.dump(dict((k, list(v)) for k, v in contrasts.items()), f)
    subject_data.contrasts = contrasts

    if normalize:
        assert hasattr(subject_data, "parameter_file")

        subject_data.native_effects_maps = effects_maps
        subject_data.native_z_maps = z_maps
        subject_data.native_mask_path = mask_path

        # warp effects maps and mask from native to standard space (MNI)
        apply_to_files = list(
            subject_data.native_effects_maps.values()
        ) + [subject_data.native_mask_path]
        tmp = nipype_mem.cache(spm.Normalize)(
            parameter_file=getattr(subject_data, "parameter_file"),
            apply_to_files=apply_to_files,
            write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
            write_voxel_sizes=func_write_voxel_sizes,
            write_wrap=[0, 0, 0],
            write_interp=1,
            jobtype='write',
            ignore_exception=False,
        ).outputs.normalized_files

        subject_data.mask = hard_link(tmp[-1], subject_data.output_dir)
        subject_data.effects_maps = dict(
            zip(effects_maps.keys(), hard_link(tmp[:-1], map_dirs["effects"])))

        # warp anat image
        subject_data.anat = hard_link(
            nipype_mem.cache(spm.Normalize)(
                parameter_file=getattr(subject_data, "parameter_file"),
                apply_to_files=subject_data.anat,
                write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
                write_voxel_sizes=anat_write_voxel_sizes,
                write_wrap=[0, 0, 0],
                write_interp=1,
                jobtype='write',
                ignore_exception=False,
            ).outputs.normalized_files, subject_data.anat_output_dir)
    else:
        subject_data.mask = mask_path
        subject_data.effects_maps = effects_maps
        subject_data.z_maps = z_maps

    return subject_data
Example #18
# Loop over subjects
for (func_file, anat_file) in zip(
        heroes['basal ASL'], heroes['anat']):
    # Create a memory context
    subject_directory = os.path.relpath(anat_file, subjects_parent_directory)
    subject_directory = subject_directory.split(os.sep)[0]
    cache_directory = os.path.join(os.path.expanduser('~/CODE/process-asl'),
                                   'procasl_cache', 'heroes',
                                   subject_directory)
    if not os.path.exists(cache_directory):
        os.makedirs(cache_directory)

    # nipype saves .m scripts into cwd
    os.chdir(cache_directory)
    mem = Memory(cache_directory)

    # Get Tag/Control sequence
    get_tag_ctl = mem.cache(preprocessing.RemoveFirstScanControl)
    out_get_tag_ctl = get_tag_ctl(in_file=func_file)

    # Rescale
    rescale = mem.cache(preprocessing.Rescale)
    out_rescale = rescale(in_file=out_get_tag_ctl.outputs.tag_ctl_file,
                          ss_tr=35.4, t_i_1=800., t_i_2=1800.)

    # Realign to first scan
    realign = mem.cache(preprocessing.Realign)
    out_realign = realign(
        in_file=out_rescale.outputs.rescaled_file,
        register_to_mean=False,
Example #19
def from_native_to_mni(img, sub_id, include_trans=[True, True, True],
                       interpolation='Linear'):
    '''Maps image from native space to mni.

    WARNING THERE IS A CLEAR PROBLEM IN THE UNDERSTANDING OF TRANSFORM ORDER
    WHEN ONLY USING THE LAST TWO TRANSFORMS THE ORDER SHOULD BE INVERTED

    We assume that the transformation files already exist for the mappings
    between:
    1) mean bold and anatomy
    2) anatomy and oasis template
    3) oasis template and mni template

    The transforms to include are:
    1) From bold to anat
    2) From anat to oasis
    3) From oasis to mni

    The include transforms should be sequential to have meaningful output,
    which means that the transformation sequence [True, False, True] is invalid.
    '''
    if include_trans == [True, False, True]:
        raise ValueError('Invalid transformation sequence')
    pipeline_dir = 'pipelines/transformations'
    if not os.path.exists(pipeline_dir):
        os.makedirs(pipeline_dir)
    mem = Memory(pipeline_dir)
    transform = mem.cache(ApplyTransforms)

    anat = os.path.join('pipelines',
                        'preprocessing',
                        'sub{0}'.format(sub_id),
                        'highres001.nii')
    oasis_template = os.path.join('pipelines',
                                  'OASIS-30_Atropos_template',
                                  'T_template0.nii.gz')
    mni_template = os.path.join('pipelines',
                                'mni_icbm152_nlin_asym_09a_nifti',
                                'mni_icbm152_nlin_asym_09a',
                                'mni_icbm152_t1_tal_nlin_asym_09a.nii')
    bold_to_anat = os.path.join('pipelines', 'preprocessing',
                                'sub{0}'.format(sub_id),
                                'bold_to_anat.txt')
    anat_to_oasis = os.path.join('pipelines', 'preprocessing',
                                 'sub{0}'.format(sub_id),
                                 'anat_to_oasis.h5')
    oasis_to_mni = os.path.join('pipelines', 'preprocessing',
                                'registered_templates', 'oasis_to_mni.h5')
    all_references = [anat, oasis_template, mni_template]
    all_trans = [bold_to_anat, anat_to_oasis, oasis_to_mni]
    all_inv_trans = [False, False, False]
    transforms = []
    inv_trans_flags = []
    reference = None
    for idx, flag in enumerate(include_trans):
        if flag:
            transforms.append(all_trans[idx])
            inv_trans_flags.append(all_inv_trans[idx])
            # Use latest transformation as reference
            reference = all_references[idx]

    trans = transform(args='--float',
                      input_image_type=3,
                      interpolation=interpolation,
                      invert_transform_flags=inv_trans_flags[::-1],
                      num_threads=n_proc,
                      reference_image=reference,
                      terminal_output='file',
                      transforms=transforms[::-1],
                      input_image=img)

    return trans.outputs.output_image
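
For example, a native-space statistical map could be pushed all the way to MNI like this (the image path is made up, and n_proc is assumed to be defined at module level, as in the function above):

mni_zmap = from_native_to_mni('pipelines/stats/sub1/zmap.nii.gz',
                              sub_id=1,
                              include_trans=[True, True, True],
                              interpolation='Linear')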
Example #20
"""
================
Rescaling demo
================

This example compares a volume before and after T1 correction.
"""
# Load functional ASL image of KIRBY dataset first subject
import os
from procasl import datasets
kirby = datasets.fetch_kirby(subjects=[4])
raw_asl_file = kirby.asl[0]

# Create a memory context
from nipype.caching import Memory
cache_directory = '/tmp'
mem = Memory(cache_directory)
os.chdir(cache_directory)
# Rescale
from procasl import preprocessing
rescale = mem.cache(preprocessing.Rescale)
out_rescale = rescale(in_file=raw_asl_file,
                      ss_tr=35.4,
                      t_i_1=800.,
                      t_i_2=1800.)

# Plot the first volume before and after rescaling
from nilearn import plotting
import matplotlib.pylab as plt
for filename, title in zip([raw_asl_file, out_rescale.outputs.rescaled_file],
                           ['raw', 'rescaled']):
    figure = plt.figure(figsize=(5, 4))
Example #21
        else:
            print(nifti_file, fmri_sessions[session_id])
            shutil.move(nifti_file, fmri_sessions[session_id])
        
        # remove the dicom dirs
        for x in glob.glob(os.path.join(dicom_dir, '*')):
            os.remove(x)
        os.removedirs(dicom_dir)
    
    ##############################################################
    # Preprocessing
    ##############################################################

    ##############################################################
    # Anatomical segmentation (White/Grey matter)
    mem = Memory(base_dir=subject_dir)
    seg = mem.cache(spm.Segment)
    out_seg = seg(data=anat_image,
                  gm_output_type=[True, True, True],
                  wm_output_type=[True, True, True],
                  csf_output_type=[True, True, True])
    sn_file = out_seg.outputs.transformation_mat
    inv_sn_file = out_seg.outputs.inverse_transformation_mat
    gm_image = out_seg.outputs.normalized_gm_image
    native_gm_image = out_seg.outputs.native_gm_image

    shutil.copyfile(native_gm_image, os.path.join(t1_dir,
        '%s_gm_image.nii' % subject))

    ##############################################################
    #  Slice timing correction
Example #22
            print(nifti_file, anat_image)
            shutil.move(nifti_file, anat_image)
        else:
            print(nifti_file, fmri_sessions[session_id])
            shutil.move(nifti_file, fmri_sessions[session_id])

        # remove the dicom dirs
        for x in glob.glob(os.path.join(dicom_dir, '*')):
            os.remove(x)
        os.removedirs(dicom_dir)

    ##############################################################
    # Preprocessing
    ##############################################################

    mem = Memory(base_dir=subject_dir)

    ##############################################################
    # Anatomical segmentation (White/Grey matter)

    seg = mem.cache(spm.Segment)

    out_seg = seg(data=anat_image,
                  gm_output_type=[True, True, True],
                  wm_output_type=[True, True, True],
                  csf_output_type=[True, True, True])
    sn_file = out_seg.outputs.transformation_mat
    inv_sn_file = out_seg.outputs.inverse_transformation_mat
    gm_image = out_seg.outputs.normalized_gm_image
    native_gm_image = out_seg.outputs.native_gm_image
Example #23
# Load functional ASL image of HEROES dataset first subject
import os
from procasl import datasets

heroes = datasets.load_heroes_dataset(
    subjects=(0,),
    subjects_parent_directory=os.path.join(os.path.expanduser("~/procasl_data"), "heroes"),
    paths_patterns={"raw ASL": "fMRI/acquisition1/vismot1_rawASL*.nii"},
)
raw_asl_file = heroes["raw ASL"][0]

# Create a memory context
from nipype.caching import Memory

cache_directory = "/tmp"
mem = Memory("/tmp")
os.chdir(cache_directory)
# Rescale
from procasl import preprocessing

rescale = mem.cache(preprocessing.Rescale)
out_rescale = rescale(in_file=raw_asl_file, ss_tr=35.4, t_i_1=800.0, t_i_2=1800.0)

# Plot the first volume before and after rescaling
from nilearn import plotting
import matplotlib.pylab as plt

for filename, title in zip([raw_asl_file, out_rescale.outputs.rescaled_file], ["raw", "rescaled"]):
    figure = plt.figure(figsize=(5, 4))
    first_scan_file = preprocessing.save_first_scan(filename)
    plotting.plot_img(first_scan_file, figure=figure, display_mode="z", cut_coords=(65,), title=title, colorbar=True)
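
When run as a script, a final show() call is typically needed for the figures to be displayed:

plotting.show()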
Example #24
def coregister_fmri_session(session_data,
                            t_r,
                            write_dir,
                            brain_volume,
                            use_rats_tool=True,
                            slice_timing=True,
                            prior_rigid_body_registration=False,
                            caching=False,
                            voxel_size_x=.1,
                            voxel_size_y=.1,
                            verbose=True,
                            **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.
    The functional volume is aligned to the anatomical, first with a rigid body
    registration and then on a per-slice basis (only a fine correction, this is
    mostly for correction of EPI distortion).


    Parameters
    ----------
    session_data : sammba.registration.SessionData
        Single animal data, giving paths to its functional and anatomical
        image, as well as it identifier.

    t_r : float
        Repetition time for the EPI, in seconds.

    write_dir : str
        Directory to save the output and temporary images.

    brain_volume : int
        Volume of the brain in mm3 used for brain extraction.
        Typically 400 for mouse and 1800 for rat.

    use_rats_tool : bool, optional
        If True, brain mask is computed using RATS Mathematical Morphology.
        Otherwise, a histogram-based brain segmentation is used.

    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is performed
        prior to the warp. Useful if the images headers have missing/wrong
        information.

    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.

    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    environ_kwargs : extra arguments keywords
        Extra arguments keywords, passed to interfaces environ variable.

    Returns
    -------
    the same session_data, updated: the following attributes are added
        - `output_dir_` : str
                          Path to the output directory.
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, RATS tool is used for brain extraction
    and has to be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_
    """
    func_filename = session_data.func
    anat_filename = session_data.anat

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    for (key, value) in environ_kwargs.items():
        environ[key] = value

    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if use_rats_tool:
        if segmentation.interfaces.Info().version() is None:
            raise ValueError('Can not locate RATS')
        else:
            ComputeMask = segmentation.MathMorphoMask
    else:
        ComputeMask = segmentation.HistogramMask

    if ants.base.Info().version is None:
        raise ValueError('Can not locate ANTS')

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        clip_level = memory.cache(afni.ClipLevel)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        tstat = memory.cache(afni.TStat)
        compute_mask = memory.cache(ComputeMask)
        calc = memory.cache(afni.Calc)
        allineate2 = memory.cache(afni.Allineate)
        unifize = memory.cache(afni.Unifize)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        catmatvec = memory.cache(afni.CatMatvec)
        warp = memory.cache(afni.Warp)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(afni.ZCutUp)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(afni.Zcat)
        copy_geom = memory.cache(fsl.CopyGeom)
        overwrite = False
        for step in [
                tshift, volreg, allineate, allineate2, tstat, compute_mask,
                calc, unifize, resample, slicer, warp_apply, qwarp, merge
        ]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run
        clip_level = afni.ClipLevel().run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        allineate2 = afni.Allineate(terminal_output=terminal_output
                                    ).run  # TODO: remove after fixed bug
        tstat = afni.TStat(terminal_output=terminal_output).run
        compute_mask = ComputeMask().run
        calc = afni.Calc(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        warp = afni.Warp().run
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = afni.ZCutUp(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = afni.Zcat(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        overwrite = True

    session_data._check_inputs()
    output_dir = os.path.join(os.path.abspath(write_dir),
                              session_data.animal_id)
    session_data._set_output_dir_(output_dir)
    current_dir = os.getcwd()
    os.chdir(output_dir)
    output_files = []

    #######################################
    # Correct functional for slice timing #
    #######################################
    if slice_timing:
        out_tshift = tshift(in_file=func_filename,
                            outputtype='NIFTI_GZ',
                            tpattern='altplus',
                            tr=str(t_r),
                            environ=environ)
        func_filename = out_tshift.outputs.out_file
        output_files.append(func_filename)

    ################################################
    # Register functional volumes to the first one #
    ################################################
    # XXX why do you need a thresholded image ?
    out_clip_level = clip_level(in_file=func_filename)
    out_calc_threshold = calc(in_file_a=func_filename,
                              expr='ispositive(a-{0}) * a'.format(
                                  out_clip_level.outputs.clip_val),
                              outputtype='NIFTI_GZ')
    thresholded_filename = out_calc_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        outputtype='NIFTI_GZ',
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='Vr.1Dfile.1D',
                                  use_ext=False),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='Vr.aff12.1D',
                                         use_ext=False))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='Av'),
                              environ=environ)

    # 3dAllineate strips the obliquity. Copying the geometry back this way
    # would discard motion-correction info from an AFNI header, but the file
    # is NIfTI, which does not store that info, so it is irrelevant here.
    out_copy_geom = copy_geom(dest_file=out_allineate.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=allineated_filename,
                      args='-mean',
                      outputtype='NIFTI_GZ',
                      environ=environ)

    # Update outputs
    output_files.extend([
        thresholded_filename, out_volreg.outputs.oned_matrix_save,
        out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
        allineated_filename, out_tstat.outputs.out_file
    ])

    ############################################
    # Correct anat and func for intensity bias #
    ############################################
    # Correct the functional average for intensity bias
    out_bias_correct = bias_correct(input_image=out_tstat.outputs.out_file)
    unbiased_func_filename = out_bias_correct.outputs.output_image

    # Bias correct the anatomical image
    out_unifize = unifize(in_file=anat_filename,
                          outputtype='NIFTI_GZ',
                          environ=environ)
    unbiased_anat_filename = out_unifize.outputs.out_file

    # Update outputs
    output_files.extend([unbiased_func_filename, unbiased_anat_filename])

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if prior_rigid_body_registration:
        # Mask the mean functional volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_func_filename)
        out_compute_mask_func = compute_mask(
            in_file=unbiased_func_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_func = calc(in_file_a=unbiased_func_filename,
                             in_file_b=out_compute_mask_func.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Mask the anatomical volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_anat_filename)
        out_compute_mask_anat = compute_mask(
            in_file=unbiased_anat_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_anat = calc(in_file_a=unbiased_anat_filename,
                             in_file_b=out_compute_mask_anat.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Compute the transformation from functional to anatomical brain
        # XXX: why in this sense
        out_allineate = allineate2(
            in_file=out_calc_func.outputs.out_file,
            reference=out_calc_anat.outputs.out_file,
            out_matrix=fname_presuffix(out_calc_func.outputs.out_file,
                                       suffix='_shr.aff12.1D',
                                       use_ext=False),
            center_of_mass='',
            warp_type='shift_rotate',
            out_file=fname_presuffix(out_calc_func.outputs.out_file,
                                     suffix='_shr'),
            environ=environ)
        rigid_transform_file = out_allineate.outputs.out_matrix
        output_files.extend([
            out_compute_mask_func.outputs.out_file,
            out_calc_func.outputs.out_file,
            out_compute_mask_anat.outputs.out_file,
            out_calc_anat.outputs.out_file, rigid_transform_file,
            out_allineate.outputs.out_file
        ])

        # apply the inverse transform to register the anatomical to the func
        catmatvec_out_file = fname_presuffix(rigid_transform_file,
                                             suffix='INV')
        out_catmatvec = catmatvec(in_file=[(rigid_transform_file, 'I')],
                                  oneline=True,
                                  out_file=catmatvec_out_file)
        output_files.append(out_catmatvec.outputs.out_file)
        out_allineate = allineate(in_file=unbiased_anat_filename,
                                  master=unbiased_func_filename,
                                  in_matrix=out_catmatvec.outputs.out_file,
                                  out_file=fname_presuffix(
                                      unbiased_anat_filename,
                                      suffix='_shr_in_func_space'),
                                  environ=environ)
        allineated_anat_filename = out_allineate.outputs.out_file
        output_files.append(allineated_anat_filename)
    else:
        allineated_anat_filename = unbiased_anat_filename

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    # 3dWarp doesn't put the obliquity in the header, so do it manually
    # This step generates one file per slice and per time point, so we are
    # making sure they are removed at the end
    out_warp = warp(in_file=allineated_anat_filename,
                    oblique_parent=unbiased_func_filename,
                    interp='quintic',
                    gridset=unbiased_func_filename,
                    outputtype='NIFTI_GZ',
                    verbose=True,
                    environ=environ)
    registered_anat_filename = out_warp.outputs.out_file
    registered_anat_oblique_filename = fix_obliquity(registered_anat_filename,
                                                     unbiased_func_filename,
                                                     verbose=verbose)

    # Concatenate all the anat to func transforms
    mat_filename = fname_presuffix(registered_anat_filename,
                                   suffix='_warp.mat',
                                   use_ext=False)
    # XXX Handle this correctly according to caching
    if not os.path.isfile(mat_filename):
        np.savetxt(mat_filename, [out_warp.runtime.stdout], fmt='%s')
        output_files.append(mat_filename)

    transform_filename = fname_presuffix(registered_anat_filename,
                                         suffix='_anat_to_func.aff12.1D',
                                         use_ext=False)
    if prior_rigid_body_registration:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE'),
                               (rigid_transform_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)
    else:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    # Slice anatomical image
    anat_img = nibabel.load(registered_anat_oblique_filename)
    anat_n_slices = anat_img.header.get_data_shape()[2]
    sliced_registered_anat_filenames = []
    for slice_n in range(anat_n_slices):
        out_slicer = slicer(in_file=registered_anat_oblique_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(
                                registered_anat_oblique_filename,
                                suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      registered_anat_oblique_filename,
                                      verbose=verbose)
        sliced_registered_anat_filenames.append(oblique_slice)

    # Slice mean functional
    sliced_bias_corrected_filenames = []
    img = nibabel.load(func_filename)
    n_slices = img.header.get_data_shape()[2]
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=unbiased_func_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(unbiased_func_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      unbiased_func_filename,
                                      verbose=verbose)
        sliced_bias_corrected_filenames.append(oblique_slice)

    # The lines below deal with slices where there is no signal (for example
    # the rostral end of some anatomicals)

    # The inverse warp frequently fails; resampling can help it work better
    # XXX why specifically .1 in voxel_size?
    voxel_size_z = anat_img.header.get_zooms()[2]
    resampled_registered_anat_filenames = []
    for sliced_registered_anat_filename in sliced_registered_anat_filenames:
        out_resample = resample(in_file=sliced_registered_anat_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_registered_anat_filenames.append(
            out_resample.outputs.out_file)

    resampled_bias_corrected_filenames = []
    for sliced_bias_corrected_filename in sliced_bias_corrected_filenames:
        out_resample = resample(in_file=sliced_bias_corrected_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_bias_corrected_filenames.append(
            out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_filenames = []
    for (resampled_bias_corrected_filename,
         resampled_registered_anat_filename) in zip(
             resampled_bias_corrected_filenames,
             resampled_registered_anat_filenames):
        warped_slice = fname_presuffix(resampled_bias_corrected_filename,
                                       suffix='_qw')
        out_qwarp = qwarp(
            in_file=resampled_bias_corrected_filename,
            base_file=resampled_registered_anat_filename,
            iwarp=True,  # XXX: is this necessary
            noneg=True,
            blur=[0],
            nmi=True,
            noXdis=True,
            allineate=True,
            allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
            '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
            '-parfix 7 0 -parfix 9 0 '
            '-parfix 10 0 -parfix 12 0',
            out_file=warped_slice,
            environ=environ)
        warped_slices.append(out_qwarp.outputs.warped_source)
        warp_filenames.append(out_qwarp.outputs.source_warp)
        output_files.append(out_qwarp.outputs.base_warp)
        # These files are generated by the allineate option
        output_files.extend([
            fname_presuffix(out_qwarp.outputs.warped_source, suffix='_Allin'),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.nii',
                            use_ext=False),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.aff12.1D',
                            use_ext=False)
        ])

    # Resample the mean volume back to the initial resolution.
    voxel_size = nibabel.load(unbiased_func_filename).header.get_zooms()
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    resampled_warped_slices_oblique = []
    for (sliced_registered_anat_filename,
         resampled_warped_slice) in zip(sliced_registered_anat_filenames,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_registered_anat_filename,
                                      verbose=verbose)
        resampled_warped_slices_oblique.append(oblique_slice)

    # slice functional
    sliced_func_filenames = []
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=allineated_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(allineated_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      allineated_filename,
                                      verbose=verbose)
        sliced_func_filenames.append(oblique_slice)

    # Apply the precomputed warp slice by slice
    warped_func_slices = []
    for (sliced_func_filename, warp_filename) in zip(sliced_func_filenames,
                                                     warp_filenames):
        out_warp_apply = warp_apply(in_file=sliced_func_filename,
                                    master=sliced_func_filename,
                                    warp=warp_filename,
                                    out_file=fname_presuffix(
                                        sliced_func_filename, suffix='_qw'),
                                    environ=environ)
        warped_func_slices.append(out_warp_apply.outputs.out_file)

    # Finally, merge all slices!
    out_merge_func = merge(in_files=warped_func_slices,
                           outputtype='NIFTI_GZ',
                           environ=environ)

    # Fix the obliquity
    merged_oblique = fix_obliquity(out_merge_func.outputs.out_file,
                                   allineated_filename,
                                   verbose=verbose)

    # Update the fmri data
    setattr(session_data, "coreg_func_", merged_oblique)
    setattr(session_data, "coreg_anat_", registered_anat_oblique_filename)
    setattr(session_data, "coreg_transform_", transform_filename)
    os.chdir(current_dir)

    # Collect the outputs
    output_files.extend(sliced_registered_anat_filenames +
                        sliced_bias_corrected_filenames +
                        resampled_registered_anat_filenames +
                        resampled_bias_corrected_filenames + warped_slices +
                        warp_filenames + resampled_warped_slices_oblique +
                        sliced_func_filenames + warped_func_slices)
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)
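
# A hedged aside (not part of the original source): the output names above are
# all derived with nipype's fname_presuffix. A minimal sketch of its behavior,
# assuming a file '/tmp/func.nii.gz':
#
#     from nipype.utils.filemanip import fname_presuffix
#     fname_presuffix('/tmp/func.nii.gz', suffix='_qw')
#     # -> '/tmp/func_qw.nii.gz' (the extension is preserved)
#     fname_presuffix('/tmp/func.nii.gz', suffix='_warp.mat', use_ext=False)
#     # -> '/tmp/func_warp.mat' (use_ext=False drops the original extension)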
Ejemplo n.º 25
0
def coregister(unifized_anat_file,
               unbiased_mean_func_file,
               write_dir,
               anat_brain_file=None,
               func_brain_file=None,
               slice_timing=True,
               t_r=None,
               prior_rigid_body_registration=None,
               reorient_only=False,
               voxel_size_x=.1,
               voxel_size_y=.1,
               caching=False,
               verbose=True,
               **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.
    The functional volume is aligned to the anatomical, first with a
    rigid body registration and then on a per-slice basis (only a fine
    correction, this is mostly for correction of EPI distortion).

    Parameters
    ----------
    unbiased_mean_func_file : str
        Path to the slice-time corrected, volume registered, averaged
        and bias-field corrected functional file
    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is
        performed prior to the warp. Useful if the image headers have
        missing/wrong information.
        NOTE: prior_rigid_body_registration is deprecated since 0.1 and will
        be removed in the next release. Use `reorient_only` instead.
    reorient_only : bool, optional
        If True, the rigid-body registration of the anat to the func is not
        performed and only reorientation is done.
    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.
    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.
    caching : bool, optional
        Whether or not to use caching.
    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.
    environ_kwargs : extra arguments keywords
        Extra arguments keywords, passed to interfaces environ variable.

    Returns
    -------
    The following attributes are added
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, RATS tool is used for brain extraction
    and has to be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_
    """
    if prior_rigid_body_registration is not None:
        warn_str = ("The parameter 'prior_rigid_body_registration' is "
                    "deprecated and will be removed in sammba-mri next "
                    "release. Use parameter 'reorient_only' instead.")
        warnings.warn(warn_str, VisibleDeprecationWarning, stacklevel=2)
        reorient_only = not prior_rigid_body_registration

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if caching:
        memory = Memory(write_dir)
        catmatvec = memory.cache(afni.CatMatvec)
    else:
        catmatvec = afni.CatMatvec().run

    for (key, value) in environ_kwargs.items():
        environ[key] = value

    output_files = []

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if reorient_only:
        allineated_anat_file = unifized_anat_file
    else:
        if anat_brain_file is None:
            raise ValueError("'anat_brain_file' is needed for "
                             "rigid-body registration")
        if func_brain_file is None:
            raise ValueError("'func_brain_file' is needed for "
                             "rigid-body registration")
        allineated_anat_file, rigid_transform_file = \
            _rigid_body_register(unifized_anat_file,
                                 unbiased_mean_func_file,
                                 anat_brain_file,
                                 func_brain_file,
                                 write_dir=write_dir,
                                 caching=caching,
                                 terminal_output=terminal_output,
                                 environ=environ)
        output_files.extend([rigid_transform_file, allineated_anat_file])

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    registered_anat_oblique_file, mat_file =\
        _warp(allineated_anat_file, unbiased_mean_func_file, write_dir,
              caching=caching, verbose=verbose,
              terminal_output=terminal_output,
              environ=environ)

    output_files.append(mat_file)
    transform_file = fname_presuffix(registered_anat_oblique_file,
                                     suffix='_func_to_anat.aff12.1D',
                                     use_ext=False)
    if reorient_only:
        _ = catmatvec(in_file=[(mat_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_file,
                      environ=environ)
    else:
        _ = catmatvec(in_file=[(rigid_transform_file, 'ONELINE'),
                               (mat_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_file,
                      environ=environ)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    warped_mean_func_file, warp_files, _ =\
        _per_slice_qwarp(unbiased_mean_func_file,
                         registered_anat_oblique_file, voxel_size_x,
                         voxel_size_y,
                         write_dir=write_dir, verbose=verbose,
                         caching=caching, terminal_output=terminal_output,
                         environ=environ)

    # Remove intermediate files when not caching
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)

    return Bunch(coreg_func_=warped_mean_func_file,
                 coreg_anat_=registered_anat_oblique_file,
                 coreg_transform_=transform_file,
                 coreg_warps_=warp_files)
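
# Hedged usage sketch (the file paths below are hypothetical, not from the
# original source): run the full coregistration with rigid-body
# initialization and caching enabled.
coreg = coregister('/data/anat_unifized.nii.gz',
                   '/data/mean_func_unbiased.nii.gz',
                   write_dir='/data/coreg',
                   anat_brain_file='/data/anat_brain.nii.gz',
                   func_brain_file='/data/func_brain.nii.gz',
                   caching=True)
print(coreg.coreg_func_)       # coregistered functional image
print(coreg.coreg_transform_)  # concatenated affine transform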
Ejemplo n.º 26
0
    def check_input(self, gui=True):
        print "**** Check Inputs ****"
        diffusion_available = False
        t1_available = False
        t2_available = False
        valid_inputs = False

        mem = Memory(base_dir=os.path.join(self.base_directory, "NIPYPE"))
        swap_and_reorient = mem.cache(SwapAndReorient)

        # Check for (and if existing, convert) diffusion data
        diffusion_model = []
        for model in ["DSI", "DTI", "HARDI"]:
            input_dir = os.path.join(self.base_directory, "RAWDATA", model)
            if len(os.listdir(input_dir)) > 0:
                if convert_rawdata(self.base_directory, input_dir, model):
                    diffusion_available = True
                    diffusion_model.append(model)

        # Check for (and if existing, convert) T1
        input_dir = os.path.join(self.base_directory, "RAWDATA", "T1")
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, "T1_orig"):
                t1_available = True

        # Check for (and if existing, convert) T2
        input_dir = os.path.join(self.base_directory, "RAWDATA", "T2")
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, "T2_orig"):
                t2_available = True

        if diffusion_available:
            # project.stages['Diffusion'].config.imaging_model_choices = diffusion_model
            if t2_available:
                swap_and_reorient(
                    src_file=os.path.join(self.base_directory, "NIFTI", "T2_orig.nii.gz"),
                    ref_file=os.path.join(self.base_directory, "NIFTI", diffusion_model[0] + ".nii.gz"),
                    out_file=os.path.join(self.base_directory, "NIFTI", "T2.nii.gz"),
                )
            if t1_available:
                swap_and_reorient(
                    src_file=os.path.join(self.base_directory, "NIFTI", "T1_orig.nii.gz"),
                    ref_file=os.path.join(self.base_directory, "NIFTI", diffusion_model[0] + ".nii.gz"),
                    out_file=os.path.join(self.base_directory, "NIFTI", "T1.nii.gz"),
                )
                valid_inputs = True
                input_message = "Inputs check finished successfully.\nDiffusion and morphological data available."
            else:
                input_message = "Error during inputs check.\nMorphological data (T1) not available."
        elif t1_available:
            input_message = "Error during inputs check. \nDiffusion data not available (DSI/DTI/HARDI)."
        else:
            input_message = (
                "Error during inputs check. No diffusion or morphological data available in folder "
                + os.path.join(self.base_directory, "RAWDATA")
                + "!"
            )

        # Guard: `diffusion_model` may be empty when no diffusion data is found
        imaging_model = diffusion_model[0] if diffusion_model else ''

        if gui:
            input_notification = Check_Input_Notification(
                message=input_message, imaging_model_options=diffusion_model, imaging_model=imaging_model
            )
            input_notification.configure_traits()
            self.global_conf.imaging_model = input_notification.imaging_model
            diffusion_file = os.path.join(self.base_directory, "NIFTI", input_notification.imaging_model + ".nii.gz")
            n_vol = nib.load(diffusion_file).shape[3]
            if (
                self.stages["Preprocessing"].config.end_vol == 0
                or self.stages["Preprocessing"].config.end_vol == self.stages["Preprocessing"].config.max_vol
                or self.stages["Preprocessing"].config.end_vol >= n_vol - 1
            ):
                self.stages["Preprocessing"].config.end_vol = n_vol - 1
            self.stages["Preprocessing"].config.max_vol = n_vol - 1
            self.stages["Registration"].config.imaging_model = input_notification.imaging_model
            self.stages["Diffusion"].config.imaging_model = input_notification.imaging_model
        else:
            print(input_message)
            self.global_conf.imaging_model = imaging_model
            diffusion_file = os.path.join(self.base_directory, "NIFTI", imaging_model + ".nii.gz")
            n_vol = nib.load(diffusion_file).shape[3]
            if (
                self.stages["Preprocessing"].config.end_vol == 0
                or self.stages["Preprocessing"].config.end_vol == self.stages["Preprocessing"].config.max_vol
                or self.stages["Preprocessing"].config.end_vol >= n_vol - 1
            ):
                self.stages["Preprocessing"].config.end_vol = n_vol - 1
            self.stages["Preprocessing"].config.max_vol = n_vol - 1
            self.stages["Registration"].config.imaging_model = imaging_model
            self.stages["Diffusion"].config.imaging_model = imaging_model

        if t2_available:
            self.stages["Registration"].config.registration_mode_trait = [
                "Linear (FSL)",
                "BBregister (FS)",
                "Nonlinear (FSL)",
            ]

        self.fill_stages_outputs()

        return valid_inputs
Ejemplo n.º 27
0
"""
================
Realignment demo
================

This example compares standard realignment to realignment with tagging
correction.
"""
# Load 4D ASL image of KIRBY dataset first subject
import os
from procasl import datasets
kirby = datasets.fetch_kirby(subjects=[4])
raw_asl_file = kirby.asl[0]

# Create a memory context
from nipype.caching import Memory
cache_directory = '/tmp'
mem = Memory(cache_directory)
os.chdir(cache_directory)

# Realign with and without tagging correction
from procasl import preprocessing
import numpy as np
realign = mem.cache(preprocessing.ControlTagRealign)
x_translation = {}
for correct_tagging in [True, False]:
    out_realign = realign(in_file=raw_asl_file,
                          correct_tagging=correct_tagging)
    x_translation[correct_tagging] = np.loadtxt(
        out_realign.outputs.realignment_parameters)[:, 2]

# Plot x-translation parameters with and without tagging correction
import matplotlib.pylab as plt
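
# The snippet is truncated here; a plausible completion (assumed, mirroring
# the duplicate realignment demo later in this collection) would plot both
# traces:
plt.figure(figsize=(10, 5))
for correct_tagging, label, color in zip([True, False],
                                         ['corrected', 'uncorrected'], 'rb'):
    plt.plot(x_translation[correct_tagging], label=label, color=color)
plt.legend()
plt.show()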
Ejemplo n.º 28
0
def from_native_to_mni(img,
                       sub_id,
                       include_trans=[True, True, True],
                       interpolation='Linear'):
    '''Maps an image from native space to MNI space.

    WARNING: there is a known problem in the understanding of the transform
    order; when only the last two transforms are used, the order should be
    inverted.

    We assume that the transformation files already exist for the mappings
    between:
    1) mean bold and anatomy
    2) anatomy and oasis template
    3) oasis template and mni template

    The transforms to include are:
    1) From bold to anat
    2) From anat to oasis
    3) From oasis to mni

    The included transforms should be sequential to have meaningful output,
    which means that the transformation sequence [True, False, True] is
    invalid.
    '''
    if include_trans == [True, False, True]:
        raise ValueError('Invalid transformation sequence')
    pipeline_dir = 'pipelines/transformations'
    if not os.path.exists(pipeline_dir):
        os.makedirs(pipeline_dir)
    mem = Memory(pipeline_dir)
    transform = mem.cache(ApplyTransforms)

    anat = os.path.join('pipelines', 'preprocessing', 'sub{0}'.format(sub_id),
                        'highres001.nii')
    oasis_template = os.path.join('pipelines', 'OASIS-30_Atropos_template',
                                  'T_template0.nii.gz')
    mni_template = os.path.join('pipelines', 'mni_icbm152_nlin_asym_09a_nifti',
                                'mni_icbm152_nlin_asym_09a',
                                'mni_icbm152_t1_tal_nlin_asym_09a.nii')
    bold_to_anat = os.path.join('pipelines', 'preprocessing',
                                'sub{0}'.format(sub_id), 'bold_to_anat.txt')
    anat_to_oasis = os.path.join('pipelines', 'preprocessing',
                                 'sub{0}'.format(sub_id), 'anat_to_oasis.h5')
    oasis_to_mni = os.path.join('pipelines', 'preprocessing',
                                'registered_templates', 'oasis_to_mni.h5')
    all_references = [anat, oasis_template, mni_template]
    all_trans = [bold_to_anat, anat_to_oasis, oasis_to_mni]
    all_inv_trans = [False, False, False]
    transforms = []
    inv_trans_flags = []
    reference = None
    for idx, flag in enumerate(include_trans):
        if flag:
            transforms.append(all_trans[idx])
            inv_trans_flags.append(all_inv_trans[idx])
            # Use latest transformation as reference
            reference = all_references[idx]

    trans = transform(args='--float',
                      input_image_type=3,
                      interpolation=interpolation,
                      invert_transform_flags=inv_trans_flags[::-1],
                      num_threads=n_proc,
                      reference_image=reference,
                      terminal_output='file',
                      transforms=transforms[::-1],
                      input_image=img)

    return trans.outputs.output_image
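
# Hedged usage sketch (hypothetical image path; `n_proc` is assumed to be a
# module-level thread count, as the function above requires):
mni_img = from_native_to_mni('pipelines/sub1_bold.nii', sub_id=1)
# Only the last two transforms (anat -> oasis -> mni):
mni_img = from_native_to_mni('pipelines/sub1_anat.nii', sub_id=1,
                             include_trans=[False, True, True])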
Ejemplo n.º 29
0
def from_mni_to_native(img,
                       sub_id,
                       include_trans=[True, True, True],
                       interpolation='Linear'):
    '''Maps an image from MNI space to native space.

    We assume that the transformation files already exist for the mappings
    between:
    1) mean bold and anatomy
    2) anatomy and oasis template
    3) oasis template and mni template

    The transforms to include are:
    1) From mni to oasis
    2) From oasis to anat
    3) From anat to bold

    The included transforms should be sequential to have meaningful output,
    which means that the transformation sequence [True, False, True] is
    invalid.
    '''
    if include_trans == [True, False, True]:
        raise ValueError('Invalid transformation sequence')
    pipeline_dir = 'pipelines/transformations'
    if not os.path.exists(pipeline_dir):
        os.makedirs(pipeline_dir)
    mem = Memory(pipeline_dir)
    transform = mem.cache(ApplyTransforms)

    oasis_template = os.path.join('pipelines', 'OASIS-30_Atropos_template',
                                  'T_template0.nii.gz')
    anat = os.path.join('pipelines', 'preprocessing', 'sub{0}'.format(sub_id),
                        'highres001.nii')
    mean_bold = os.path.join('pipelines', 'preprocessing',
                             'sub{0}'.format(sub_id), 'mean_bold.nii')
    mni_to_oasis = os.path.join('pipelines', 'preprocessing',
                                'registered_templates', 'mni_to_oasis.h5')
    oasis_to_anat = os.path.join('pipelines', 'preprocessing',
                                 'sub{0}'.format(sub_id), 'oasis_to_anat.h5')
    bold_to_anat = os.path.join('pipelines', 'preprocessing',
                                'sub{0}'.format(sub_id), 'bold_to_anat.txt')

    all_references = [oasis_template, anat, mean_bold]
    all_trans = [mni_to_oasis, oasis_to_anat, bold_to_anat]
    all_inv_trans = [False, False, True]
    transforms = []
    inv_trans_flags = []
    reference = None
    for idx, flag in enumerate(include_trans):
        if flag:
            transforms.append(all_trans[idx])
            inv_trans_flags.append(all_inv_trans[idx])
            # Use latest transformation as reference
            reference = all_references[idx]

    trans = transform(args='--float',
                      input_image_type=3,
                      interpolation=interpolation,
                      invert_transform_flags=inv_trans_flags[::-1],
                      num_threads=n_proc,
                      reference_image=reference,
                      terminal_output='file',
                      transforms=transforms[::-1],
                      input_image=img)

    return trans.outputs.output_image
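
# Hedged usage sketch (hypothetical image path; `n_proc` is assumed to be a
# module-level thread count, as in the function above): map a group-level
# statistical image from MNI space back to a subject's functional space
# using the full inverse chain (mni -> oasis -> anat -> bold).
native_img = from_mni_to_native('pipelines/group_stat_mni.nii', sub_id=1)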
Ejemplo n.º 30
0
def do_subject_preproc(subject_id,
                       output_dir,
                       func,
                       anat,
                       do_bet=True,
                       do_mc=True,
                       do_coreg=True,
                       do_normalize=True,
                       cmd_prefix="fsl5.0-",
                       **kwargs
                       ):
    """
    Preprocesses subject data using FSL.

    Parameters
    ----------

    """

    output = {'func': func,
              'anat': anat
              }

    # output dir
    subject_output_dir = os.path.join(output_dir, subject_id)
    if not os.path.exists(subject_output_dir):
        os.makedirs(subject_output_dir)

    # prepare for smart-caching
    cache_dir = os.path.join(output_dir, "cache_dir")
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    nipype_mem = NipypeMemory(base_dir=cache_dir)
    joblib_mem = JoblibMemory(cache_dir, verbose=100)

    # sanitize input files
    if not isinstance(output['func'], str):
        output['func'] = joblib_mem.cache(do_fsl_merge)(
            func, subject_output_dir, output_prefix='Merged',
            cmd_prefix=cmd_prefix)

    ######################
    #  Brain Extraction
    ######################
    if do_bet:
        if not fsl.BET._cmd.startswith("fsl"):
            fsl.BET._cmd = cmd_prefix + fsl.BET._cmd

        bet = nipype_mem.cache(fsl.BET)
        bet_results = bet(in_file=output['anat'],
                          )

        output['anat'] = bet_results.outputs.out_file

    #######################
    #  Motion correction
    #######################
    if do_mc:
        if not fsl.MCFLIRT._cmd.startswith("fsl"):
            fsl.MCFLIRT._cmd = cmd_prefix + fsl.MCFLIRT._cmd

        mcflirt = nipype_mem.cache(fsl.MCFLIRT)
        mcflirt_results = mcflirt(in_file=output['func'],
                                  cost='mutualinfo',
                                  save_mats=True,  # save mc matrices
                                  save_plots=True  # save mc params
                                  )

        output['motion_parameters'] = mcflirt_results.outputs.par_file
        output['motion_matrices'] = mcflirt_results.outputs.mat_file
        output['func'] = mcflirt_results.outputs.out_file

    ###################
    # Coregistration
    ###################
    if do_coreg:
        if not fsl.FLIRT._cmd.startswith("fsl"):
            fsl.FLIRT._cmd = cmd_prefix + fsl.FLIRT._cmd

        flirt1 = nipype_mem.cache(fsl.FLIRT)
        flirt1_results = flirt1(in_file=output['func'],
                                reference=output['anat']
                                )

        if not do_normalize:
            output['func'] = flirt1_results.outputs.out_file

    ##########################
    # Spatial normalization
    ##########################
    if do_normalize:
        if not fsl.FLIRT._cmd.startswith("fsl"):
            fsl.FLIRT._cmd = cmd_prefix + fsl.FLIRT._cmd

        # T1 normalization
        flirt2 = nipype_mem.cache(fsl.FLIRT)
        flirt2_results = flirt2(in_file=output['anat'],
                                reference=FSL_T1_TEMPLATE)

        output['anat'] = flirt2_results.outputs.out_file

        # concatenate 'func -> anat' and 'anat -> standard space'
        # transformation matrices to obtain the 'func -> standard space'
        # transformation matrix
        if do_coreg:
            if not fsl.ConvertXFM._cmd.startswith("fsl"):
                fsl.ConvertXFM._cmd = cmd_prefix + fsl.ConvertXFM._cmd

            convertxfm = nipype_mem.cache(fsl.ConvertXFM)
            convertxfm_results = convertxfm(
                in_file=flirt1_results.outputs.out_matrix_file,
                in_file2=flirt2_results.outputs.out_matrix_file,
                concat_xfm=True
                )

        # warp func data into standard space by applying
        # 'func -> standard space' transformation matrix
        if not fsl.ApplyXfm._cmd.startswith("fsl"):
            fsl.ApplyXfm._cmd = cmd_prefix + fsl.ApplyXfm._cmd

        applyxfm = nipype_mem.cache(fsl.ApplyXfm)
        applyxfm_results = applyxfm(
            in_file=output['func'],
            in_matrix_file=convertxfm_results.outputs.out_file,
            reference=FSL_T1_TEMPLATE
            )

        output['func'] = applyxfm_results.outputs.out_file

    return output
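
# Hedged usage sketch (hypothetical paths; FSL_T1_TEMPLATE is assumed to be
# defined at module level, as the function above requires):
results = do_subject_preproc('sub001', '/data/out',
                             func='/data/sub001/func.nii.gz',
                             anat='/data/sub001/anat.nii.gz')
print(results['func'])  # preprocessed functional, in standard space
print(results['anat'])  # brain-extracted, normalized anatomical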
"""
================
Realignment demo
================

This example compares standard realignment to realignment with tagging
correction.
"""
# Create a memory context
from nipype.caching import Memory

mem = Memory("/tmp")

# Give the path to the 4D ASL image
raw_asl_file = "/tmp/func.nii"

# Realign with and without tagging correction
from procasl import preprocessing
import numpy as np

realign = mem.cache(preprocessing.Realign)
x_translation = {}
for correct_tagging in [True, False]:
    out_realign = realign(in_file=raw_asl_file, correct_tagging=correct_tagging)
    x_translation[correct_tagging] = np.loadtxt(out_realign.outputs.realignment_parameters)[:, 2]

# Plot x-translation parameters with and without tagging correction
import matplotlib.pylab as plt

plt.figure(figsize=(10, 5))
for correct_tagging, label, color in zip([True, False], ["corrected", "uncorrected"], "rb"):
    # Assumed completion: the original snippet is truncated at this point
    plt.plot(x_translation[correct_tagging], label=label, color=color)
Ejemplo n.º 32
0
    out.runtime.cwd
"""

from nipype.interfaces import fsl
fsl.FSLCommand.set_default_output_type('NIFTI')

from nipype.caching import Memory

import glob

# First retrieve the list of files that we want to work upon
in_files = glob.glob('data/*/f3.nii')

# Create a memory context
mem = Memory('.')

# Apply an arbitrary (and pointless, here) threshold to the files)
threshold = [mem.cache(fsl.Threshold)(in_file=f, thresh=i)
                        for i, f in enumerate(in_files)]

# Merge all these files along the time dimension
out_merge = mem.cache(fsl.Merge)(dimension="t",
                            in_files=[t.outputs.out_file for t in threshold],
                        )
# And finally compute the mean
out_mean = mem.cache(fsl.MeanImage)(in_file=out_merge.outputs.merged_file)

# To avoid having increasing disk size we can keep only what was touched
# in this run
#mem.clear_previous_runs()
Ejemplo n.º 33
0
    def check_input(self, gui=True):
        print('**** Check Inputs ****')
        diffusion_available = False
        bvecs_available = False
        bvals_available = False
        t1_available = False
        t2_available = False
        valid_inputs = False

        dwi_file = os.path.join(self.subject_directory, 'dwi',
                                self.subject + '_dwi.nii.gz')
        bval_file = os.path.join(self.subject_directory, 'dwi',
                                 self.subject + '_dwi.bval')
        bvec_file = os.path.join(self.subject_directory, 'dwi',
                                 self.subject + '_dwi.bvec')
        T1_file = os.path.join(self.subject_directory, 'anat',
                               self.subject + '_T1w.nii.gz')
        T2_file = os.path.join(self.subject_directory, 'anat',
                               self.subject + '_T2w.nii.gz')

        print "Looking for...."
        print "dwi_file : %s" % dwi_file
        print "bvecs_file : %s" % bvec_file
        print "bvals_file : %s" % bval_file
        print "T1_file : %s" % T1_file
        print "T2_file : %s" % T2_file

        try:
            layout = BIDSLayout(self.base_directory)
            print "Valid BIDS dataset with %s subjects" % len(
                layout.get_subjects())
            for subj in layout.get_subjects():
                self.global_conf.subjects.append('sub-' + str(subj))
            # self.global_conf.subjects = ['sub-'+str(subj) for subj in layout.get_subjects()]
            self.global_conf.modalities = [
                str(mod) for mod in layout.get_modalities()
            ]
            # mods = layout.get_modalities()
            types = layout.get_types()
            # print "Available modalities :"
            # for mod in mods:
            #     print "-%s" % mod

            for typ in types:
                if typ == 'dwi' and os.path.isfile(dwi_file):
                    print "%s available" % typ
                    diffusion_available = True

                if typ == 'T1w' and os.path.isfile(T1_file):
                    print "%s available" % typ
                    t1_available = True

                if typ == 'T2w' and os.path.isfile(T2_file):
                    print "%s available" % typ
                    t2_available = True
        except Exception:
            error(
                message=
                "Invalid BIDS dataset. Please see documentation for more details.",
                title="Error",
                buttons=['OK', 'Cancel'],
                parent=None)

        if os.path.isfile(bval_file):
            bvals_available = True

        if os.path.isfile(bvec_file):
            bvecs_available = True

        mem = Memory(base_dir=os.path.join(self.derivatives_directory, 'cmp',
                                           self.subject, 'tmp', 'nipype'))
        swap_and_reorient = mem.cache(SwapAndReorient)

        if diffusion_available:
            if bvals_available and bvecs_available:
                self.stages[
                    'Diffusion'].config.diffusion_imaging_model_choices = self.diffusion_imaging_model

                # Copy diffusion data to derivatives/cmp/subject/dwi
                out_dwi_file = os.path.join(self.derivatives_directory, 'cmp',
                                            self.subject, 'dwi',
                                            self.subject + '_dwi.nii.gz')
                out_bval_file = os.path.join(self.derivatives_directory, 'cmp',
                                             self.subject, 'dwi',
                                             self.subject + '_dwi.bval')
                out_bvec_file = os.path.join(self.derivatives_directory, 'cmp',
                                             self.subject, 'dwi',
                                             self.subject + '_dwi.bvec')

                shutil.copy(src=dwi_file, dst=out_dwi_file)
                shutil.copy(src=bvec_file, dst=out_bvec_file)
                shutil.copy(src=bval_file, dst=out_bval_file)

                if t2_available:
                    print "Swap and reorient T2"
                    swap_and_reorient(
                        src_file=os.path.join(self.subject_directory, 'anat',
                                              self.subject + '_T2w.nii.gz'),
                        ref_file=os.path.join(self.subject_directory, 'dwi',
                                              self.subject + '_dwi.nii.gz'),
                        out_file=os.path.join(self.derivatives_directory,
                                              'cmp', self.subject, 'anat',
                                              self.subject + '_T2w.nii.gz'))
                if t1_available:
                    swap_and_reorient(
                        src_file=os.path.join(self.subject_directory, 'anat',
                                              self.subject + '_T1w.nii.gz'),
                        ref_file=os.path.join(self.subject_directory, 'dwi',
                                              self.subject + '_dwi.nii.gz'),
                        out_file=os.path.join(self.derivatives_directory,
                                              'cmp', self.subject, 'anat',
                                              self.subject + '_T1w.nii.gz'))
                    valid_inputs = True
                    input_message = 'Inputs check finished successfully.\nDiffusion and morphological data available.'
                else:
                    input_message = 'Error during inputs check.\nMorphological data (T1) not available.'
            else:
                input_message = 'Error during inputs check.\nDiffusion bvec or bval files not available.'
        elif t1_available:
            input_message = 'Error during inputs check. \nDiffusion data not available (DSI/DTI/HARDI).'
        else:
            input_message = 'Error during inputs check. No diffusion or morphological data available in folder ' + os.path.join(
                self.base_directory, 'RAWDATA') + '!'

        #diffusion_imaging_model = diffusion_imaging_model[0]

        if gui:
            #input_notification = Check_Input_Notification(message=input_message, diffusion_imaging_model_options=diffusion_imaging_model,diffusion_imaging_model=diffusion_imaging_model)
            #input_notification.configure_traits()
            print(input_message)
            self.global_conf.diffusion_imaging_model = self.diffusion_imaging_model
            diffusion_file = os.path.join(self.subject_directory, 'dwi',
                                          self.subject + '_dwi.nii.gz')
            n_vol = nib.load(diffusion_file).shape[3]
            if self.stages['Preprocessing'].config.end_vol == 0 or self.stages[
                    'Preprocessing'].config.end_vol == self.stages[
                        'Preprocessing'].config.max_vol or self.stages[
                            'Preprocessing'].config.end_vol >= n_vol - 1:
                self.stages['Preprocessing'].config.end_vol = n_vol - 1
            self.stages['Preprocessing'].config.max_vol = n_vol - 1
            self.stages[
                'Registration'].config.diffusion_imaging_model = self.diffusion_imaging_model
            self.stages[
                'Diffusion'].config.diffusion_imaging_model = self.diffusion_imaging_model
        else:
            print(input_message)
            self.global_conf.diffusion_imaging_model = self.diffusion_imaging_model
            diffusion_file = os.path.join(self.subject_directory, 'dwi',
                                          self.subject + '_dwi.nii.gz')
            n_vol = nib.load(diffusion_file).shape[3]
            if self.stages['Preprocessing'].config.end_vol == 0 or self.stages[
                    'Preprocessing'].config.end_vol == self.stages[
                        'Preprocessing'].config.max_vol or self.stages[
                            'Preprocessing'].config.end_vol >= n_vol - 1:
                self.stages['Preprocessing'].config.end_vol = n_vol - 1
            self.stages['Preprocessing'].config.max_vol = n_vol - 1
            self.stages[
                'Registration'].config.diffusion_imaging_model = self.diffusion_imaging_model
            self.stages[
                'Diffusion'].config.diffusion_imaging_model = self.diffusion_imaging_model

        if t2_available:
            self.stages['Registration'].config.registration_mode_trait = [
                'Linear + Non-linear (FSL)'
            ]  #,'BBregister (FS)','Nonlinear (FSL)']

        if (t1_available and diffusion_available):
            valid_inputs = True
        else:
            print "Missing required inputs."
            error(
                message=
                "Missing required inputs. Please see documentation for more details.",
                title="Error",
                buttons=['OK', 'Cancel'],
                parent=None)

        for stage in self.stages.values():
            if stage.enabled:
                print(stage.name)
                print(stage.stage_dir)

        self.fill_stages_outputs()

        return valid_inputs
Ejemplo n.º 34
0
def from_mni_to_native(img, sub_id, include_trans=[True, True, True],
                       interpolation='Linear'):
    '''Maps an image from MNI space to native space.

    We assume that the transformation files already exist for the mappings
    between:
    1) mean bold and anatomy
    2) anatomy and oasis template
    3) oasis template and mni template

    The transforms to include are:
    1) From mni to oasis
    2) From oasis to anat
    3) From anat to bold

    The included transforms should be sequential to have meaningful output,
    which means that the transformation sequence [True, False, True] is
    invalid.
    '''
    if include_trans == [True, False, True]:
        raise ValueError('Invalid transformation sequence')
    pipeline_dir = 'pipelines/transformations'
    if not os.path.exists(pipeline_dir):
        os.makedirs(pipeline_dir)
    mem = Memory(pipeline_dir)
    transform = mem.cache(ApplyTransforms)

    oasis_template = os.path.join('pipelines',
                                  'OASIS-30_Atropos_template',
                                  'T_template0.nii.gz')
    anat = os.path.join('pipelines',
                        'preprocessing',
                        'sub{0}'.format(sub_id),
                        'highres001.nii')
    mean_bold = os.path.join('pipelines', 'preprocessing',
                             'sub{0}'.format(sub_id),
                             'mean_bold.nii')
    mni_to_oasis = os.path.join('pipelines', 'preprocessing',
                                'registered_templates', 'mni_to_oasis.h5')
    oasis_to_anat = os.path.join('pipelines', 'preprocessing',
                                 'sub{0}'.format(sub_id),
                                 'oasis_to_anat.h5')
    bold_to_anat = os.path.join('pipelines', 'preprocessing',
                                'sub{0}'.format(sub_id),
                                'bold_to_anat.txt')

    all_references = [oasis_template, anat, mean_bold]
    all_trans = [mni_to_oasis, oasis_to_anat, bold_to_anat]
    all_inv_trans = [False, False, True]
    transforms = []
    inv_trans_flags = []
    reference = None
    for idx, flag in enumerate(include_trans):
        if flag:
            transforms.append(all_trans[idx])
            inv_trans_flags.append(all_inv_trans[idx])
            # Use latest transformation as reference
            reference = all_references[idx]

    trans = transform(args='--float',
                      input_image_type=3,
                      interpolation=interpolation,
                      invert_transform_flags=inv_trans_flags[::-1],
                      num_threads=n_proc,
                      reference_image=reference,
                      terminal_output='file',
                      transforms=transforms[::-1],
                      input_image=img)

    return trans.outputs.output_image
Ejemplo n.º 35
0
def _realign(func_filename,
             write_dir,
             caching=False,
             terminal_output='allatonce',
             environ=None):
    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        clip_level = memory.cache(afni.ClipLevel)
        threshold = memory.cache(fsl.Threshold)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        copy = memory.cache(afni.Copy)
        copy_geom = memory.cache(fsl.CopyGeom)
        tstat = memory.cache(afni.TStat)
        for step in [threshold, volreg, allineate, tstat, copy, copy_geom]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        clip_level = afni.ClipLevel().run
        threshold = fsl.Threshold(terminal_output=terminal_output).run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        copy = afni.Copy(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        tstat = afni.TStat(terminal_output=terminal_output).run

    out_clip_level = clip_level(in_file=func_filename)

    out_threshold = threshold(in_file=func_filename,
                              thresh=out_clip_level.outputs.clip_val,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='_thresholded',
                                                       newpath=write_dir))
    thresholded_filename = out_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        out_file=fname_presuffix(thresholded_filename,
                                 suffix='_volreg',
                                 newpath=write_dir),
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='_volreg.1Dfile.1D',
                                  use_ext=False,
                                  newpath=write_dir),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='_volreg.aff12.1D',
                                         use_ext=False,
                                         newpath=write_dir))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='_volreg',
                                                       newpath=write_dir),
                              environ=environ)

    # 3dAllineate removes the obliquity. Copying the header back is not a good
    # way to re-add it, as it would remove the motion-correction info from the
    # header if this were an AFNI file; as it happens the file is NIfTI, which
    # does not store that info, so this is irrelevant here.
    out_copy = copy(in_file=out_allineate.outputs.out_file,
                    out_file=fname_presuffix(out_allineate.outputs.out_file,
                                             suffix='_oblique',
                                             newpath=write_dir),
                    environ=environ)
    out_copy_geom = copy_geom(dest_file=out_copy.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    oblique_allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=oblique_allineated_filename,
                      args='-mean',
                      out_file=fname_presuffix(oblique_allineated_filename,
                                               suffix='_tstat',
                                               newpath=write_dir),
                      environ=environ)

    # Remove intermediate outputs
    if not caching:
        for output_file in [
                thresholded_filename, out_volreg.outputs.oned_matrix_save,
                out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
                out_allineate.outputs.out_file
        ]:
            os.remove(output_file)
    return (oblique_allineated_filename, out_tstat.outputs.out_file,
            out_volreg.outputs.oned_file)
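
# Hedged usage sketch (hypothetical path): the helper returns the realigned
# series, a mean volume to drive later registrations, and the motion
# parameters file.
realigned_file, mean_file, motion_file = _realign('/data/func.nii.gz',
                                                  write_dir='/data/realign',
                                                  caching=True)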
Ejemplo n.º 36
0
    img.to_filename('/home/ys218403/Data/dartel_cache/sub_%s' % name)
    func_file = '/home/ys218403/Data/dartel_cache/sub_%s' % name
    func_niimg = gaelmem.cache(resample_img)(
        nb.load(func_file),
        target_affine=anat_niimg.get_affine(),
        target_shape=anat_niimg.shape)
    func_niimg.to_filename(
        '/home/ys218403/Data/dartel_cache/oversampled_%s' % name)
    resampled_func.append(
        '/home/ys218403/Data/dartel_cache/oversampled_%s' % name)

print(resampled_func)

cache_dir = "/home/ys218403/Data/dartel_cache"
if not os.path.exists(cache_dir):
    os.makedirs(cache_dir)
mem = Memory(cache_dir)

tricky_kwargs = {}

dartelnorm2mni_result = mem.cache(spm.DARTELNorm2MNI)(
    apply_to_files=resampled_func[:1],
    flowfield_files=[flow_fields],
    template_file=template_file,
    ignore_exception=False,
    modulate=False,  # don't modulate
    fwhm=0.,  # don't smooth
    **tricky_kwargs)

normalized_func = dartelnorm2mni_result.outputs.normalized_files

# createwarped_result = mem.cache(spm.CreateWarped)(
Ejemplo n.º 37
0
def _func_to_template(func_coreg_filename,
                      template_filename,
                      write_dir,
                      func_to_anat_oned_filename,
                      anat_to_template_oned_filename,
                      anat_to_template_warp_filename,
                      voxel_size=None,
                      caching=False,
                      verbose=True):
    """ Applies successive transforms to coregistered functional to put it in
    template space.

    Parameters
    ----------
    func_coreg_filename : str
        Path to functional volume, coregistered to a common space with the
        anatomical volume.

    template_filename : str
        Template to register the functional to.

    func_to_anat_oned_filename : str
        Path to the affine 1D transform from functional to coregistration
        space.

    anat_to_template_oned_filename : str
        Path to the affine 1D transform from anatomical to template space.

    anat_to_template_warp_filename : str
        Path to the warp transform from anatomical to template space.

    voxel_size : 3-tuple of floats, optional
        Voxel size of the registered functional, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.
    """
    environ = {}
    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if caching:
        memory = Memory(write_dir)
        resample = memory.cache(afni.Resample)
        catmatvec = memory.cache(afni.CatMatvec)
        allineate = memory.cache(afni.Allineate)
        warp_apply = memory.cache(afni.NwarpApply)
        for step in [resample, allineate, warp_apply]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        environ['AFNI_DECONFLICT'] = 'OVERWRITE'

    current_dir = os.getcwd()
    os.chdir(write_dir)  # XXX to remove
    normalized_filename = fname_presuffix(func_coreg_filename,
                                          suffix='_normalized')
    if voxel_size is None:
        func_template_filename = template_filename
    else:
        out_resample = resample(in_file=template_filename,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        func_template_filename = out_resample.outputs.out_file

    if anat_to_template_warp_filename is None:
        affine_transform_filename = fname_presuffix(func_to_anat_oned_filename,
                                                    suffix='_to_template')
        _ = catmatvec(in_file=[(anat_to_template_oned_filename, 'ONELINE'),
                               (func_to_anat_oned_filename, 'ONELINE')],
                      oneline=True,
                      out_file=affine_transform_filename,
                      environ=environ)
        _ = allineate(in_file=func_coreg_filename,
                      master=func_template_filename,
                      in_matrix=affine_transform_filename,
                      out_file=normalized_filename,
                      environ=environ)
    else:
        warp = "'{0} {1} {2}'".format(anat_to_template_warp_filename,
                                      anat_to_template_oned_filename,
                                      func_to_anat_oned_filename)

        _ = warp_apply(in_file=func_coreg_filename,
                       master=func_template_filename,
                       warp=warp,
                       out_file=normalized_filename,
                       environ=environ)
    os.chdir(current_dir)
    return normalized_filename
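
# Hedged usage sketch (hypothetical paths): apply the nonlinear warp chained
# with the two affines to bring a coregistered functional into template space.
normalized_file = _func_to_template(
    '/data/func_coreg.nii.gz',
    '/data/template.nii.gz',
    '/data/normalize',
    func_to_anat_oned_filename='/data/func_to_anat.aff12.1D',
    anat_to_template_oned_filename='/data/anat_to_template.aff12.1D',
    anat_to_template_warp_filename='/data/anat_to_template_WARP.nii.gz')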
Ejemplo n.º 38
0
    def check_input(self, gui=True):
        print('**** Check Inputs ****')
        fMRI_available = False
        t1_available = False
        t2_available = False
        valid_inputs = False

        mem = Memory(base_dir=os.path.join(self.base_directory,'NIPYPE'))
        swap_and_reorient = mem.cache(SwapAndReorient)

        # Check for (and if existing, convert) functional data
        input_dir = os.path.join(self.base_directory,'RAWDATA','fMRI')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'fMRI'):
                fMRI_available = True

        # Check for (and if existing, convert) T1
        input_dir = os.path.join(self.base_directory,'RAWDATA','T1')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'T1_orig'):
                t1_available = True

        # Check for (and if existing, convert) T2
        input_dir = os.path.join(self.base_directory,'RAWDATA','T2')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'T2_orig'):
                t2_available = True   

        if fMRI_available:
            if t2_available:
                swap_and_reorient(src_file=os.path.join(self.base_directory,'NIFTI','T2_orig.nii.gz'),
                                  ref_file=os.path.join(self.base_directory,'NIFTI','fMRI.nii.gz'),
                                  out_file=os.path.join(self.base_directory,'NIFTI','T2.nii.gz'))
            if t1_available:
                swap_and_reorient(src_file=os.path.join(self.base_directory,'NIFTI','T1_orig.nii.gz'),
                                  ref_file=os.path.join(self.base_directory,'NIFTI','fMRI.nii.gz'),
                                  out_file=os.path.join(self.base_directory,'NIFTI','T1.nii.gz'))
                valid_inputs = True
                input_message = 'Inputs check finished successfully.\nfMRI and morphological data available.'
            else:
                input_message = 'Error during inputs check.\nMorphological data (T1) not available.'
        elif t1_available:
            input_message = 'Error during inputs check. \nfMRI data not available (fMRI).'
        else:
            input_message = 'Error during inputs check. No fMRI or morphological data available in folder '+os.path.join(self.base_directory,'RAWDATA')+'!'

        if gui: 
            input_notification = Check_Input_Notification(message=input_message, imaging_model='fMRI')
            input_notification.configure_traits()
            self.global_conf.imaging_model = input_notification.imaging_model
            self.stages['Registration'].config.imaging_model = input_notification.imaging_model
        else:
            print(input_message)
            self.global_conf.imaging_model = 'fMRI'
            self.stages['Registration'].config.imaging_model = 'fMRI'
       
        if t2_available:
            self.stages['Registration'].config.registration_mode_trait = ['Linear (FSL)','BBregister (FS)','Nonlinear (FSL)']
       
        self.fill_stages_outputs()
       
        return valid_inputs
Example No. 39
def preproc(funcfile, anatfile, sid, outdir, repetitiontime, template, jipdir,
            erase, resample, interleaved, sliceorder, realign_dof,
            realign_to_vol, warp, warp_njobs, warp_index, warp_file,
            warp_restrict, delta_te, dwell_time, manufacturer, blip_files,
            blip_enc_dirs, unwarp_direction, phase_file, magnitude_file,
            anatorient, funcorient, kernel_size, fslconfig, normalization_trf,
            coregistration_trf, recon1, recon2, auto, verbose):
    """ fMRI preprocessings using FSL, SPM, JIP, and ANTS.
    """
    # TODO: remove when all controls available in pypipe
    if not isinstance(erase, bool):
        erase = eval(erase)
        resample = eval(resample)
        interleaved = eval(interleaved)
        realign_to_vol = eval(realign_to_vol)
        warp = eval(warp)
        recon1 = eval(recon1)
        recon2 = eval(recon2)
        auto = eval(auto)
        warp_restrict = eval(warp_restrict)
        blip_files = None if blip_files == "" else eval(blip_files)
        blip_enc_dirs = eval(blip_enc_dirs)

    # Read input parameters
    funcfile = os.path.abspath(funcfile)
    anatfile = os.path.abspath(anatfile)
    template = os.path.abspath(template)
    jipdir = os.path.abspath(jipdir)
    realign_to_mean = not realign_to_vol
    subjdir = os.path.join(os.path.abspath(outdir), sid)
    cachedir = os.path.join(subjdir, "cachedir")
    outputs = {}
    if erase and os.path.isdir(subjdir):
        shutil.rmtree(subjdir)
    if not os.path.isdir(cachedir):
        os.makedirs(cachedir)
    nipype_memory = NipypeMemory(cachedir)
    joblib_memory = JoblibMemory(cachedir, verbose=verbose)

    def display_outputs(outputs, verbose, **kwargs):
        """ Simple function to display/store step outputs.
        """
        if verbose > 0:
            print("-" * 50)
            for key, val in kwargs.items():
                print("{0}: {1}".format(key, val))
            print("-" * 50)
        outputs.update(kwargs)

    # Check input parameters
    template_axes = guess_orientation(template)
    if template_axes != "RAS":
        raise ValueError("The template orientation must be 'RAS', '{0}' "
                         "found.".format(template_axes))
    check_jip_install(jipdir)
    if sliceorder not in ("ascending", "descending"):
        raise ValueError("Supported slice orders are: ascending & descending.")

    # Slice timing
    fslenv = environment(fslconfig)
    if (fslenv["FSLDIR"] != os.environ.get("FSLDIR", "")):
        os.environ = concat_environment(os.environ, fslenv)
    st_dir = os.path.join(subjdir, STEPS["slice_timing"])
    if not os.path.isdir(st_dir):
        os.mkdir(st_dir)
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    interface = nipype_memory.cache(fsl.SliceTimer)
    returncode = interface(
        in_file=funcfile,
        interleaved=interleaved,
        slice_direction=3,
        time_repetition=repetitiontime,
        index_dir=False if sliceorder == "ascending" else True,
        out_file=os.path.join(
            st_dir,
            os.path.basename(funcfile).split(".")[0] + ".nii.gz"))
    st_outputs = returncode.outputs.get()
    slice_time_corrected_file = st_outputs["slice_time_corrected_file"]
    display_outputs(outputs,
                    verbose,
                    slice_time_corrected_file=slice_time_corrected_file)

    # B0 inhomogeneity correction (topup, fugue, or none), then motion-induced distortion correction
    if warp:
        warp_dir = os.path.join(subjdir, STEPS["warp"])
        if not os.path.isdir(warp_dir):
            os.mkdir(warp_dir)
        if blip_files is not None:
            interface = joblib_memory.cache(topup)
            fieldmap_hz_file, unwarped_epi_file = interface(
                blip_up_file=blip_files[0],
                blip_down_file=blip_files[1],
                blip_up_phase_enc_dir=blip_enc_dirs[0],
                blip_down_phase_enc_dir=blip_enc_dirs[1],
                apply_to=slice_time_corrected_file,
                unwarp_direction=unwarp_direction,
                dwell_time=dwell_time,
                outdir=warp_dir,
                fsl_sh=fslconfig)
        elif phase_file is not None:
            interface = joblib_memory.cache(fugue)
            magnitude_brain_mask_file, vsm_file, unwarped_epi_file = interface(
                epi_file=slice_time_corrected_file,
                phase_file=phase_file,
                magnitude_file=magnitude_file,
                delta_te=delta_te,
                dwell_time=dwell_time,
                unwarp_direction=unwarp_direction,
                manufacturer=manufacturer,
                outdir=warp_dir,
                fsl_sh=fslconfig,
                verbose=verbose)
        else:
            unwarped_epi_file = slice_time_corrected_file
        interface = joblib_memory.cache(timeserie_to_reference)
        b0_corrected_file = interface(tfile=unwarped_epi_file,
                                      rindex=warp_index,
                                      restrict_deformation=warp_restrict,
                                      rfile=warp_file,
                                      outdir=warp_dir,
                                      njobs=warp_njobs,
                                      clean_tmp=True)
    else:
        b0_corrected_file = slice_time_corrected_file
    display_outputs(outputs, verbose, b0_corrected_file=b0_corrected_file)

    # Reorient images not in RAS coordinate system and
    # reorient images to match the orientation of the standard MNI152 template.
    reorient_dir = os.path.join(subjdir, STEPS["reorient"])
    if not os.path.isdir(reorient_dir):
        os.mkdir(reorient_dir)
    reoriented_funcfile = b0_corrected_file
    reoriented_anatfile = anatfile
    interface = joblib_memory.cache(reorient_image)
    if funcorient != "RAS":
        reoriented_funcfile = interface(b0_corrected_file,
                                        axes=funcorient,
                                        prefix="o",
                                        output_directory=reorient_dir)
    if anatorient != "RAS":
        reoriented_anatfile = interface(anatfile,
                                        axes=anatorient,
                                        prefix="o",
                                        output_directory=reorient_dir)
    standard_funcfile = os.path.join(
        reorient_dir,
        "d" + os.path.basename(reoriented_funcfile).split(".")[0])
    standard_anatfile = os.path.join(
        reorient_dir,
        "d" + os.path.basename(reoriented_anatfile).split(".")[0])
    interface = joblib_memory.cache(fslreorient2std)
    standard_funcfile = interface(reoriented_funcfile,
                                  standard_funcfile,
                                  fslconfig=fslconfig)
    standard_anatfile = interface(reoriented_anatfile,
                                  standard_anatfile,
                                  fslconfig=fslconfig)
    display_outputs(outputs,
                    verbose,
                    standard_funcfile=standard_funcfile,
                    standard_anatfile=standard_anatfile)

    # Downsample template
    if resample:
        template = resample_image(source_file=template,
                                  target_file=standard_anatfile,
                                  out_file=os.path.join(
                                      subjdir, "template.nii.gz"),
                                  fslconfig=fslconfig)

    # Realign
    realign_dir = os.path.join(subjdir, STEPS["realign"])
    if not os.path.isdir(realign_dir):
        os.mkdir(realign_dir)
    realign_func_rootfile = os.path.join(
        realign_dir, "r" + os.path.basename(standard_funcfile).split(".")[0])
    interface = joblib_memory.cache(mcflirt)
    realign_funcfile, realign_func_meanfile, realign_func_parfile = interface(
        in_file=standard_funcfile,
        out_fileroot=realign_func_rootfile,
        cost="normcorr",
        bins=256,
        dof=realign_dof,
        refvol=warp_index,
        reffile=warp_file,
        reg_to_mean=realign_to_mean,
        mats=True,
        plots=True,
        verbose=verbose,
        shfile=fslconfig)
    display_outputs(outputs,
                    verbose,
                    realign_funcfile=realign_funcfile,
                    realign_func_meanfile=realign_func_meanfile)

    # Early stop detected
    if recon1:
        print(
            "[warn] User requested a processing early stop. Remove the 'recon1' "
            "option to resume.")
        return outputs

    # Normalization.
    normalization_dir = os.path.join(subjdir, STEPS["normalization"])
    if not os.path.isdir(normalization_dir):
        os.mkdir(normalization_dir)
    if normalization_trf is not None:
        shutil.copyfile(normalization_trf,
                        os.path.join(normalization_dir, "align.com"))
    interface = joblib_memory.cache(jip_align)
    (register_anatfile, register_anat_maskfile, native_anat_maskfile,
     align_normfile) = interface(source_file=standard_anatfile,
                                 target_file=template,
                                 outdir=normalization_dir,
                                 jipdir=jipdir,
                                 prefix="w",
                                 auto=auto,
                                 non_linear=True,
                                 fslconfig=fslconfig)
    display_outputs(outputs,
                    verbose,
                    register_anatfile=register_anatfile,
                    register_anat_maskfile=register_anat_maskfile,
                    native_anat_maskfile=native_anat_maskfile,
                    align_normfile=align_normfile)

    # Tissues segmentation and spatial intensity variations correction.
    inhomogeneities_dir = os.path.join(subjdir, STEPS["inhomogeneities"])
    if not os.path.isdir(inhomogeneities_dir):
        os.mkdir(inhomogeneities_dir)
    biascorrected_anatfile = os.path.join(
        inhomogeneities_dir, "n4_" + os.path.basename(native_anat_maskfile))
    bias_anatfile = os.path.join(
        inhomogeneities_dir,
        "n4field_" + os.path.basename(native_anat_maskfile))
    interface = nipype_memory.cache(ants.N4BiasFieldCorrection)
    returncode = interface(dimension=3,
                           input_image=native_anat_maskfile,
                           bspline_fitting_distance=200,
                           shrink_factor=2,
                           n_iterations=[50, 50, 40, 30],
                           convergence_threshold=1e-6,
                           output_image=biascorrected_anatfile,
                           bias_image=bias_anatfile)
    n4_outputs = returncode.outputs.get()
    display_outputs(outputs,
                    verbose,
                    bias_anatfile=bias_anatfile,
                    biascorrected_anatfile=biascorrected_anatfile)

    # Coregistration.
    coregistration_dir = os.path.join(subjdir, STEPS["coregistration"])
    if not os.path.isdir(coregistration_dir):
        os.mkdir(coregistration_dir)
    if coregistration_trf is not None:
        shutil.copy(coregistration_trf, coregistration_dir)
    interface = joblib_memory.cache(jip_align)
    (register_func_meanfile, register_func_mean_maskfile,
     native_func_mean_maskfile,
     align_coregfile) = interface(source_file=realign_func_meanfile,
                                  target_file=biascorrected_anatfile,
                                  outdir=coregistration_dir,
                                  jipdir=jipdir,
                                  prefix="w",
                                  auto=auto,
                                  non_linear=False,
                                  fslconfig=fslconfig)
    display_outputs(outputs,
                    verbose,
                    register_func_meanfile=register_func_meanfile,
                    register_func_mean_maskfile=register_func_mean_maskfile,
                    native_func_mean_maskfile=native_func_mean_maskfile,
                    align_coregfile=align_coregfile)

    # Early stop detected
    if recon2:
        print(
            "[warn] User requested a processing early stop. Remove the 'recon2' "
            "option to resume.")
        return outputs

    # Warp functional: resample the functional series and mask the registered series.
    wrap_dir = os.path.join(subjdir, STEPS["wrap"])
    if not os.path.isdir(wrap_dir):
        os.mkdir(wrap_dir)
    interface = joblib_memory.cache(apply_jip_align)
    deformed_files = interface(apply_to_files=[realign_funcfile],
                               align_with=[align_coregfile, align_normfile],
                               outdir=wrap_dir,
                               jipdir=jipdir,
                               prefix="w",
                               apply_inv=False)
    register_funcfile = deformed_files[0]
    register_func_mask_fileroot = os.path.join(
        wrap_dir, "m" + os.path.basename(register_funcfile).split(".")[0])
    interface = joblib_memory.cache(apply_mask)
    register_func_maskfile = interface(
        input_file=register_funcfile,
        output_fileroot=register_func_mask_fileroot,
        mask_file=template,
        fslconfig=fslconfig)
    display_outputs(outputs,
                    verbose,
                    register_funcfile=register_funcfile,
                    register_func_maskfile=register_func_maskfile)

    # Smooth the functional series.
    smooth_dir = os.path.join(subjdir, STEPS["smooth"])
    if not os.path.isdir(smooth_dir):
        os.mkdir(smooth_dir)
    interface = nipype_memory.cache(fsl.Smooth)
    returncode = interface(
        in_file=register_func_maskfile,
        fwhm=kernel_size,
        output_type="NIFTI",
        smoothed_file=os.path.join(
            smooth_dir, "smooth_" +
            os.path.basename(register_func_maskfile).split(".")[0] + ".nii"))
    smooth_outputs = returncode.outputs.get()
    smoothed_file = smooth_outputs["smoothed_file"]
    display_outputs(outputs, verbose, smoothed_file=smoothed_file)

    # Copy the results to the root directory: use Nifti format.
    nibabel.load(smoothed_file).to_filename(os.path.join(subjdir, "sMNI.nii"))
    nibabel.load(register_func_maskfile).to_filename(
        os.path.join(subjdir, "MNI.nii"))
    nibabel.load(register_anat_maskfile).to_filename(
        os.path.join(subjdir, "anat.nii"))

    # Compute some snaps assessing the different processing steps.
    snapdir = os.path.join(subjdir, STEPS["snaps"])
    if not os.path.isdir(snapdir):
        os.mkdir(snapdir)
    interface = joblib_memory.cache(triplanar)
    # > generate coregistration plot
    coregister_fileroot = os.path.join(snapdir, "coregister")
    coregister_file = interface(input_file=register_func_meanfile,
                                output_fileroot=coregister_fileroot,
                                overlays=[standard_anatfile],
                                overlays_colors=None,
                                contours=True,
                                edges=False,
                                overlay_opacities=[0.7],
                                resolution=300)
    # > generate normalization plot
    normalize_fileroot = os.path.join(snapdir, "normalization")
    normalize_file = interface(input_file=register_anatfile,
                               output_fileroot=normalize_fileroot,
                               overlays=[template],
                               overlays_colors=None,
                               contours=True,
                               edges=False,
                               overlay_opacities=[0.7],
                               resolution=300)
    # > generate a motion parameter plot
    interface = joblib_memory.cache(plot_fsl_motion_parameters)
    realign_motion_file = os.path.join(snapdir,
                                       "realign_motion_parameters.png")
    interface(realign_func_parfile, realign_motion_file)
    display_outputs(outputs,
                    verbose,
                    normalize_file=normalize_file,
                    realign_motion_file=realign_motion_file,
                    coregister_file=coregister_file)

    # Generate a QC reporting
    reportdir = os.path.join(subjdir, STEPS["report"])
    reportfile = os.path.join(reportdir, "QC_preproc_{0}.pdf".format(sid))
    if not os.path.isdir(reportdir):
        os.mkdir(reportdir)
    interface = joblib_memory.cache(generate_pdf)
    tic = datetime.now()
    date = "{0}-{1}-{2}".format(tic.year, tic.month, tic.day)
    interface(datapath=snapdir,
              struct_file=os.path.join(
                  os.path.abspath(os.path.dirname(pypreclin.__file__)),
                  "utils", "resources", "pypreclin_qcpreproc.json"),
              author="NeuroSpin",
              client="-",
              poweredby="FSL-SPM-Nipype-JIP",
              project="-",
              timepoint="-",
              subject=sid,
              date=date,
              title="fMRI Preprocessing QC Reporting",
              filename=reportfile,
              pagesize=None,
              left_margin=10,
              right_margin=10,
              top_margin=20,
              bottom_margin=20,
              show_boundary=False,
              verbose=0)
    display_outputs(outputs, verbose, reportfile=reportfile)

    return outputs
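# A minimal sketch of the dual-cache pattern used above, under the assumption
# of a throwaway cache directory: nipype's Memory caches Interface runs while
# joblib's Memory caches plain Python functions, both keyed to their inputs.
import os
from joblib import Memory as JoblibMemory
from nipype.caching import Memory as NipypeMemory
from nipype.interfaces import fsl

cachedir = "/tmp/cachedir"                         # assumption: any writable dir
os.makedirs(cachedir, exist_ok=True)
nipype_memory = NipypeMemory(cachedir)             # caches Interface runs
joblib_memory = JoblibMemory(cachedir, verbose=0)  # caches plain functions

smooth = nipype_memory.cache(fsl.Smooth)           # reruns only on new inputs

def count_volumes(path):
    """Toy stand-in for the cached helper functions used in the pipeline."""
    import nibabel
    return nibabel.load(path).shape[-1]

count_volumes = joblib_memory.cache(count_volumes)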
def run_suject_level1_glm(subject_data,
                          readout_time=.01392,  # seconds
                          tr=.72,
                          dc=True,
                          hrf_model="Canonical with Derivative",
                          drift_model="Cosine",
                          hfcut=100,
                          regress_motion=True,
                          slicer='ortho',
                          cut_coords=None,
                          threshold=3.,
                          cluster_th=15,
                          normalize=True,
                          fwhm=0.,
                          protocol="MOTOR",
                          func_write_voxel_sizes=None,
                          anat_write_voxel_sizes=None,
                          **other_preproc_kwargs
                          ):
    """
    Function to do preproc + analysis for a single HCP subject (task fMRI)

    """

    add_regs_files = None
    n_motion_regressions = 6
    subject_data.n_sessions = 2

    subject_data.tmp_output_dir = os.path.join(subject_data.output_dir, "tmp")
    if not os.path.exists(subject_data.tmp_output_dir):
        os.makedirs(subject_data.tmp_output_dir)

    if not os.path.exists(subject_data.output_dir):
        os.makedirs(subject_data.output_dir)

    mem = Memory(os.path.join(subject_data.output_dir, "cache_dir"),
                 verbose=100)

    # glob design files (.fsf)
    subject_data.design_files = [os.path.join(
            subject_data.data_dir, ("MNINonLinear/Results/tfMRI_%s_%s/"
                                    "tfMRI_%s_%s_hp200_s4_level1.fsf") % (
                protocol, direction, protocol, direction))
            for direction in ['LR', 'RL']]

    assert len(subject_data.design_files) == 2
    for df in subject_data.design_files:
        if not os.path.isfile(df):
            return

    if False:  # disabled: fMRI distortion correction step skipped
        subject_data = _do_fmri_distortion_correction(
            subject_data, dc=dc, fwhm=fwhm, readout_time=readout_time,
            **other_preproc_kwargs)

    # chronometry
    stats_start_time = pretty_time()

    # merged lists
    paradigms = []
    frametimes_list = []
    design_matrices = []
    # fmri_files = []
    n_scans = []
    # for direction, direction_index in zip(['LR', 'RL'], range(2)):
    for sess in range(subject_data.n_sessions):
        direction = ['LR', 'RL'][sess]
        # pick the design file for this session
        design_file = subject_data.design_files[sess]
        if not os.path.isfile(design_file):
            print("Can't find design file %s; skipping subject %s" % (
                design_file, subject_data.subject_id))
            return

        # read the experimental setup
        print("Reading experimental setup from %s ..." % design_file)
        fsl_condition_ids, timing_files, fsl_contrast_ids, contrast_values = \
            read_fsl_design_file(design_file)
        print("... done.\r\n")

        # fix timing filenames
        timing_files = [tf.replace("EVs", "tfMRI_%s_%s/EVs" % (
                    protocol, direction)) for tf in timing_files]

        # make design matrix
        print "Constructing design matrix for direction %s ..." % direction
        _n_scans = nibabel.load(subject_data.func[sess]).shape[-1]
        n_scans.append(_n_scans)
        add_regs_file = add_regs_files[
            sess] if add_regs_files is not None else None
        design_matrix, paradigm, frametimes = make_dmtx_from_timing_files(
            timing_files, fsl_condition_ids, n_scans=_n_scans, tr=tr,
            hrf_model=hrf_model, drift_model=drift_model, hfcut=hfcut,
            add_regs_file=add_regs_file,
            add_reg_names=[
                'Translation along x axis',
                'Translation along y axis',
                'Translation along z axis',
                'Rotation along x axis',
                'Rotation along y axis',
                'Rotation along z axis',
                'Differential Translation along x axis',
                'Differential Translation along y axis',
                'Differential Translation along z axis',
                'Differential Rotation along x axis',
                'Differential Rotation along y axis',
                'Differential Rotation along z axis'
                ][:n_motion_regressions] if add_regs_files is not None
            else None,
            )

        print "... done."
        paradigms.append(paradigm)
        frametimes_list.append(frametimes)
        design_matrices.append(design_matrix)

        # convert contrasts to dict
        contrasts = dict((contrast_id,
                          # append zeros to end of contrast to match design
                          np.hstack((contrast_value, np.zeros(len(
                                design_matrix.names) - len(contrast_value)))))

                         for contrast_id, contrast_value in zip(
                fsl_contrast_ids, contrast_values))

        # more interesting contrasts
        if protocol == 'MOTOR':
            contrasts['RH-LH'] = contrasts['RH'] - contrasts['LH']
            contrasts['LH-RH'] = -contrasts['RH-LH']
            contrasts['RF-LF'] = contrasts['RF'] - contrasts['LF']
            contrasts['LF-RF'] = -contrasts['RF-LF']
            contrasts['H'] = contrasts['RH'] + contrasts['LH']
            contrasts['F'] = contrasts['RF'] + contrasts['LF']
            contrasts['H-F'] = contrasts['RH'] + contrasts['LH'] - (
                contrasts['RF'] + contrasts['LF'])
            contrasts['F-H'] = -contrasts['H-F']

        contrasts = dict((k, v) for k, v in contrasts.items() if "-" in k)

    # replicate contrasts across sessions
    contrasts = dict((cid, [cval] * 2)
                     for cid, cval in contrasts.items())

    cache_dir = os.path.join(subject_data.output_dir, 'cache_dir')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    nipype_mem = NipypeMemory(base_dir=cache_dir)

    if False:  # disabled: optional SPM smoothing step skipped
        if np.sum(fwhm) > 0.:
            subject_data.func = nipype_mem.cache(spm.Smooth)(
                in_files=subject_data.func,
                fwhm=fwhm,
                ignore_exception=False,
                ).outputs.smoothed_files

    # fit GLM
    def tortoise(*args):
        print(args)
        print (
            'Fitting a "Fixed Effect" GLM for merging LR and RL '
            'phase-encoding directions for subject %s ...' % (
                subject_data.subject_id))
        fmri_glm = FMRILinearModel(subject_data.func,
                                   [design_matrix.matrix
                                    for design_matrix in design_matrices],
                                   mask='compute'
                                   )
        fmri_glm.fit(do_scaling=True, model='ar1')
        print "... done.\r\n"

        # save computed mask
        mask_path = os.path.join(subject_data.output_dir, "mask.nii")
        print "Saving mask image to %s ..." % mask_path
        nibabel.save(fmri_glm.mask, mask_path)
        print "... done.\r\n"

        z_maps = {}
        effects_maps = {}
        map_dirs = {}
        try:
            for contrast_id, contrast_val in contrasts.items():
                print("\tcontrast id: %s" % contrast_id)
                z_map, eff_map = fmri_glm.contrast(
                    contrast_val,
                    con_id=contrast_id,
                    output_z=True,
                    output_effects=True
                    )

                # store stat maps to disk
                for map_type, out_map in zip(['z', 'effects'],
                                             [z_map, eff_map]):
                    map_dir = os.path.join(
                        subject_data.output_dir, '%s_maps' % map_type)
                    map_dirs[map_type] = map_dir
                    if not os.path.exists(map_dir):
                        os.makedirs(map_dir)
                    map_path = os.path.join(
                        map_dir, '%s_%s.nii' % (map_type, contrast_id))
                    print "\t\tWriting %s ..." % map_path

                    nibabel.save(out_map, map_path)

                    # collect zmaps for contrasts we're interested in
                    if map_type == 'z':
                        z_maps[contrast_id] = map_path

                    if map_type == 'effects':
                        effects_maps[contrast_id] = map_path

            return effects_maps, z_maps, mask_path, map_dirs
        except Exception:
            return None

    # compute native-space maps and mask
    stuff = mem.cache(tortoise)(
        subject_data.func, subject_data.anat)
    if stuff is None:
        return None
    effects_maps, z_maps, mask_path, map_dirs = stuff

    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.items())
    import json
    with open(os.path.join(subject_data.tmp_output_dir,
                           "contrasts.json"), "w") as fp:
        json.dump(dict((k, list(v)) for k, v in contrasts.items()), fp)
    subject_data.contrasts = contrasts

    if normalize:
        assert hasattr(subject_data, "parameter_file")

        subject_data.native_effects_maps = effects_maps
        subject_data.native_z_maps = z_maps
        subject_data.native_mask_path = mask_path

        # warp effects maps and mask from native to standard space (MNI)
        apply_to_files = [
            v for _, v in subject_data.native_effects_maps.items()
            ] + [subject_data.native_mask_path]
        tmp = nipype_mem.cache(spm.Normalize)(
            parameter_file=getattr(subject_data, "parameter_file"),
            apply_to_files=apply_to_files,
            write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
            write_voxel_sizes=func_write_voxel_sizes,
            write_wrap=[0, 0, 0],
            write_interp=1,
            jobtype='write',
            ignore_exception=False,
            ).outputs.normalized_files

        subject_data.mask = hard_link(tmp[-1], subject_data.output_dir)
        subject_data.effects_maps = dict(zip(effects_maps.keys(), hard_link(
                    tmp[:-1], map_dirs["effects"])))

        # warp anat image
        subject_data.anat = hard_link(nipype_mem.cache(spm.Normalize)(
                parameter_file=getattr(subject_data, "parameter_file"),
                apply_to_files=subject_data.anat,
                write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
                write_voxel_sizes=anat_write_voxel_sizes,
                write_wrap=[0, 0, 0],
                write_interp=1,
                jobtype='write',
                ignore_exception=False,
                ).outputs.normalized_files, subject_data.anat_output_dir)
    else:
        subject_data.mask = mask_path
        subject_data.effects_maps = effects_maps
        subject_data.z_maps = z_maps

    return subject_data
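# The contrast bookkeeping above zero-pads each FSL contrast vector to the
# design-matrix width and then duplicates it per session, so the GLM fits a
# fixed effect across the LR and RL runs. A minimal sketch of just that
# padding step, with made-up sizes (not taken from the pipeline):
import numpy as np
contrast_value = np.array([1., -1.])   # e.g. a two-column RH - LH contrast
n_design_columns = 10                  # assumption: design has 10 regressors
padded = np.hstack((contrast_value,
                    np.zeros(n_design_columns - len(contrast_value))))
per_session_contrast = [padded] * 2    # one copy per phase-encoding run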
Example No. 41
            print(nifti_file, anat_image)
            shutil.move(nifti_file, anat_image)
        else:
            print(nifti_file, fmri_sessions[session_id])
            shutil.move(nifti_file, fmri_sessions[session_id])

        # remove the dicom dirs
        for x in glob.glob(os.path.join(dicom_dir, '*')):
            os.remove(x)
        os.removedirs(dicom_dir)

    ##############################################################
    # Preprocessing
    ##############################################################

    mem = Memory(base_dir=subject_dir)

    ##############################################################
    # Anatomical segmentation (White/Grey matter)

    seg = mem.cache(spm.Segment)
    
    out_seg = seg(data=anat_image,
                  gm_output_type=[True, True, True],
                  wm_output_type=[True, True, True],
                  csf_output_type=[True, True, True])
    sn_file = out_seg.outputs.transformation_mat
    inv_sn_file = out_seg.outputs.inverse_transformation_mat
    gm_image = out_seg.outputs.normalized_gm_image
    native_gm_image = out_seg.outputs.native_gm_image
Example No. 42
    out.runtime.cwd
"""

from nipype.interfaces import fsl
fsl.FSLCommand.set_default_output_type('NIFTI')

from nipype.caching import Memory

import glob

# First retrieve the list of files that we want to work upon
in_files = glob.glob('data/*/f3.nii')

# Create a memory context
mem = Memory('.')

# Apply an arbitrary (and pointless, here) threshold to the files)
threshold = [
    mem.cache(fsl.Threshold)(in_file=f, thresh=i)
    for i, f in enumerate(in_files)
]

# Merge all these files along the time dimension
out_merge = mem.cache(fsl.Merge)(
    dimension="t",
    in_files=[t.outputs.out_file for t in threshold],
)
# And finally compute the mean
out_mean = mem.cache(fsl.MeanImage)(in_file=out_merge.outputs.merged_file)
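
# Cached results accumulate under the memory directory across runs; nipype's
# Memory offers two pruning helpers. A hedged sketch of that bookkeeping
# (the date below is made up):
mem.clear_runs_since(year=2020, month=1, day=1)  # drop entries unused since that date
mem.clear_previous_runs()  # drop entries unused since this Memory was created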
Example No. 43
    })
current_directory = os.getcwd()

# Loop over subjects
for (func_file, anat_file) in zip(heroes['BOLD EPI'], heroes['anat']):
    # Create a memory context
    subject_directory = os.path.relpath(anat_file, subjects_parent_directory)
    subject_directory = subject_directory.split(os.sep)[0]
    cache_directory = os.path.join(os.path.expanduser('~/CODE/process-asl'),
                                   'procasl_cache', 'heroes',
                                   subject_directory)
    if not os.path.exists(cache_directory):
        os.mkdir(cache_directory)

    os.chdir(cache_directory)  # nipype saves .m scripts in current directory
    mem = Memory(cache_directory)

    # Realign EPIs
    realign = mem.cache(spm.Realign)
    out_realign = realign(in_files=func_file, register_to_mean=True)

    # Coregister anat to mean EPIs
    coregister = mem.cache(spm.Coregister)
    out_coregister = coregister(target=out_realign.outputs.mean_image,
                                source=anat_file,
                                write_interp=3,
                                jobtype='estimate')

    # Segment anat
    segment = mem.cache(spm.Segment)
    out_segment = segment(data=anat_file)
Example No. 44
current_directory = os.getcwd()

# Loop over subjects
for (func_file, anat_file) in zip(
        heroes['BOLD EPI'], heroes['anat']):
    # Create a memory context
    subject_directory = os.path.relpath(anat_file, subjects_parent_directory)
    subject_directory = subject_directory.split(os.sep)[0]
    cache_directory = os.path.join(os.path.expanduser('~/CODE/process-asl'),
                                   'procasl_cache', 'heroes',
                                   subject_directory)
    if not os.path.exists(cache_directory):
        os.mkdir(cache_directory)

    os.chdir(cache_directory)  # nipype saves .m scripts in current directory
    mem = Memory(cache_directory)

    # Realign EPIs
    realign = mem.cache(spm.Realign)
    out_realign = realign(
        in_files=func_file,
        register_to_mean=True)

    # Coregister anat to mean EPIs
    coregister = mem.cache(spm.Coregister)
    out_coregister = coregister(
        target=out_realign.outputs.mean_image,
        source=anat_file,
        write_interp=3,
        jobtype='estimate')
Example No. 45
### Convert RAW DICOM data to NIFTI Image Data set
import os, sys
from os.path import join as oj
from glob import glob

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.io as nio
from nipype.interfaces.dcm2nii import Dcm2nii
from nipype.caching import Memory

## This is the location of the Raw DICOM Files from BITC
StoutRawData = '/home/ehecht/BIGDATA/Stout_Homo_Faber/RAWDATA/'
NiPypeOutputDir = '/EINSTEIN_BIGDATA/NIPYPE_WD/Stout_Haber/'
mem = Memory(base_dir=NiPypeOutputDir)  ## Create a memory cache I can use going forward

dcmConverter = mem.cache(Dcm2nii)

DicomScanSet = {
    'T1':         {'dcmDir': '*t1*', 'outputDirName': 'T1'},
    'diff5B0_PA': {'dcmDir': 'cmrr_mbep2_diff_*5B0_PA*', 'outputDirName': 'DTI/preprocess/diff5B0_PA'},
    'diffAP':     {'dcmDir': 'cmrr_mbep2d_diff_AP_[1-9]*', 'outputDirName': 'DTI/preprocess/AP'},
    'AP_ADC':     {'dcmDir': 'cmrr_mbep2d_diff_AP_ADC_[1-9]*', 'outputDirName': 'DTI/preprocess/AP_ADC'},
    'AP_FA':      {'dcmDir': 'cmrr_mbep2d_diff_AP_FA_[1-9]*', 'outputDirName': 'DTI/preprocess/AP_FA'},
    'AP_TRACEW':  {'dcmDir': 'cmrr_mbep2d_diff_AP_TRACEW_[1-9]*', 'outputDirName': 'DTI/preprocess/AP_TRACEW'},
}

#QFE   *t1* for *t1*mprage*??? 

#     # convert dicoms
#     $statement  = " dcm2nii -o $WORKINGDATAPATH/" . $subj[$i] . "/T1/ ";
#     $statement .= " $RAWDATAPATH/" . $subjID[$i] . "/t1*/ ";
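
# A hedged sketch (this loop is an assumption, not part of the original
# snippet): glob each DICOM folder in the scan map and run the cached
# converter on it, so repeated invocations reuse earlier conversions.
for scan, spec in DicomScanSet.items():
    out_dir = oj(NiPypeOutputDir, spec['outputDirName'])
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    for dcm_dir in glob(oj(StoutRawData, spec['dcmDir'])):
        dcmConverter(source_dir=dcm_dir, output_dir=out_dir,
                     gzip_output=True)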
Example No. 46
#!/home/groups/russpold/software/miniconda/envs/fmri/bin/python
from argparse import ArgumentParser
import glob
import nibabel as nib
from nilearn.image import concat_imgs, smooth_img, mean_img, math_img, resample_to_img
from nipype.interfaces import fsl
from nipype.caching import Memory
mem = Memory(base_dir='.')
import numpy as np
import os
import pandas as pd
import pickle
import re
import sys
sys.path.append(os.path.join(os.environ['SERVER_SCRIPTS'], 'nistats/level_3'))
from save_randomise import save_randomise
randomise = mem.cache(fsl.Randomise)

#Usage: python level_3.py -m MNUM -r REG

parser = ArgumentParser()
parser.add_argument("-m", "--mnum", help="model number")
parser.add_argument("-r", "--reg", help="regressor name")
parser.add_argument("-tf", "--tfce", help="tfce", action='store_true')
parser.add_argument("-c", "--c_thresh", help="cluster_threshold", default=3)
parser.add_argument("-np", "--num_perm", help="number of permutations", default=1000)
parser.add_argument("-vs", "--var_smooth", help="variance smoothing", default=5)
parser.add_argument("-s", "--sign", help="calculate p values for positive t's")
args = parser.parse_args()
mnum = args.mnum
reg = args.reg
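
# A hedged sketch of feeding the parsed arguments into the cached interface;
# the in_file/mask names below are assumptions, not the script's real paths:
randomise_results = randomise(
    in_file='model-%s_%s_copes.nii.gz' % (mnum, reg),  # hypothetical naming
    mask='group_mask.nii.gz',                          # hypothetical mask
    one_sample_group_mean=True,
    tfce=args.tfce,
    vox_p_values=True,
    var_smooth=int(args.var_smooth),
    num_perm=int(args.num_perm))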
Example No. 47
    def check_input(self, gui=True):
        print('**** Check Inputs ****')
        diffusion_available = False
        t1_available = False
        t2_available = False
        valid_inputs = False

        mem = Memory(base_dir=os.path.join(self.base_directory,'NIPYPE'))
        swap_and_reorient = mem.cache(SwapAndReorient)

        # Check for (and if existing, convert) diffusion data
        diffusion_model = []
        for model in ['DSI','DTI','HARDI']:
            input_dir = os.path.join(self.base_directory,'RAWDATA',model)
            if len(os.listdir(input_dir)) > 0:
                if convert_rawdata(self.base_directory, input_dir, model):
                    diffusion_available = True
                    diffusion_model.append(model)

        # Check for (and if existing, convert)  T1
        input_dir = os.path.join(self.base_directory,'RAWDATA','T1')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'T1_orig'):
                t1_available = True

        # Check for (and if existing, convert)  T2
        input_dir = os.path.join(self.base_directory,'RAWDATA','T2')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'T2_orig'):
                t2_available = True   

        if diffusion_available:
            #project.stages['Diffusion'].config.imaging_model_choices = diffusion_model
            if t2_available:
                swap_and_reorient(src_file=os.path.join(self.base_directory,'NIFTI','T2_orig.nii.gz'),
                                  ref_file=os.path.join(self.base_directory,'NIFTI',diffusion_model[0]+'.nii.gz'),
                                  out_file=os.path.join(self.base_directory,'NIFTI','T2.nii.gz'))
            if t1_available:
                swap_and_reorient(src_file=os.path.join(self.base_directory,'NIFTI','T1_orig.nii.gz'),
                                  ref_file=os.path.join(self.base_directory,'NIFTI',diffusion_model[0]+'.nii.gz'),
                                  out_file=os.path.join(self.base_directory,'NIFTI','T1.nii.gz'))
                valid_inputs = True
                input_message = 'Inputs check finished successfully.\nDiffusion and morphological data available.'
            else:
                input_message = 'Error during inputs check.\nMorphological data (T1) not available.'
        elif t1_available:
            input_message = 'Error during inputs check. \nDiffusion data not available (DSI/DTI/HARDI).'
        else:
            input_message = 'Error during inputs check. No diffusion or morphological data available in folder '+os.path.join(self.base_directory,'RAWDATA')+'!'

        imaging_model = ''
        if len(diffusion_model) > 0:
            imaging_model = diffusion_model[0]
         
        if gui: 
            input_notification = Check_Input_Notification(message=input_message, imaging_model_options=diffusion_model,imaging_model=imaging_model)
            input_notification.configure_traits()
            self.global_conf.imaging_model = input_notification.imaging_model
            diffusion_file = os.path.join(self.base_directory,'NIFTI',input_notification.imaging_model+'.nii.gz')
            n_vol = nib.load(diffusion_file).shape[3]
            if self.stages['Preprocessing'].config.end_vol == 0 or self.stages['Preprocessing'].config.end_vol == self.stages['Preprocessing'].config.max_vol or self.stages['Preprocessing'].config.end_vol >= n_vol-1:
                self.stages['Preprocessing'].config.end_vol = n_vol-1
            self.stages['Preprocessing'].config.max_vol = n_vol-1
            self.stages['Registration'].config.imaging_model = input_notification.imaging_model
            self.stages['Diffusion'].config.imaging_model = input_notification.imaging_model
        else:
            print(input_message)
            self.global_conf.imaging_model = imaging_model
            diffusion_file = os.path.join(self.base_directory,'NIFTI',imaging_model+'.nii.gz')
            n_vol = nib.load(diffusion_file).shape[3]
            if self.stages['Preprocessing'].config.end_vol == 0 or self.stages['Preprocessing'].config.end_vol == self.stages['Preprocessing'].config.max_vol or self.stages['Preprocessing'].config.end_vol >= n_vol-1:
                self.stages['Preprocessing'].config.end_vol = n_vol-1
            self.stages['Preprocessing'].config.max_vol = n_vol-1
            self.stages['Registration'].config.imaging_model = imaging_model
            self.stages['Diffusion'].config.imaging_model = imaging_model
       
        if t2_available:
            self.stages['Registration'].config.registration_mode_trait = ['Linear (FSL)','BBregister (FS)','Nonlinear (FSL)']
       
        self.fill_stages_outputs()
       
        return valid_inputs
from procasl import preprocessing, _utils
current_directory = os.getcwd()
for (func_file, anat_file) in zip(
        heroes['func ASL'], heroes['anat']):
    # Create a memory context
    subject_directory = os.path.relpath(anat_file, subjects_parent_directory)
    subject_directory = subject_directory.split(os.sep)[0]
    cache_directory = os.path.join(os.path.expanduser('~/CODE/process-asl'),
                                   'procasl_cache', 'heroes',
                                   subject_directory)
    if not os.path.exists(cache_directory):
        os.mkdir(cache_directory)

    # nipype saves .m scripts into cwd
    os.chdir(cache_directory)
    mem = Memory(cache_directory)

    # Get Tag/Control sequence
    get_tag_ctl = mem.cache(preprocessing.RemoveFirstScanControl)
    out_get_tag_ctl = get_tag_ctl(in_file=func_file)

    # Rescale
    rescale = mem.cache(preprocessing.Rescale)
    out_rescale = rescale(in_file=out_get_tag_ctl.outputs.tag_ctl_file,
                          ss_tr=35.4, t_i_1=800., t_i_2=1800.)

    # Realign to first scan
    realign = mem.cache(preprocessing.ControlTagRealign)
    out_realign = realign(
        in_file=out_rescale.outputs.rescaled_file,
        register_to_mean=False)
"""
===================
Coregistration demo
===================

This example shows a basic coregistration step from anatomical to mean
functional.
"""
# Create a memory context
from nipype.caching import Memory
mem = Memory('/tmp')

# Compute mean functional
from procasl import preprocessing
average = mem.cache(preprocessing.Average)
out_average = average(in_file='/tmp/func.nii')
mean_func = out_average.outputs.mean_file

# Coregister anat to mean functional
from nipype.interfaces import spm
coregister = mem.cache(spm.Coregister)
out_coregister = coregister(
    target=mean_func,
    source='/tmp/anat.nii',
    write_interp=3)

# Check coregistration
import matplotlib.pyplot as plt
from nilearn import plotting
figure = plt.figure(figsize=(5, 4))
display = plotting.plot_anat(mean_func, figure=figure, display_mode='z')
Example No. 50
    def check_input(self, gui=True):
        print('**** Check Inputs ****')
        diffusion_available = False
        t1_available = False
        t2_available = False
        valid_inputs = False

        mem = Memory(base_dir=os.path.join(self.base_directory, 'NIPYPE'))
        swap_and_reorient = mem.cache(SwapAndReorient)

        # Check for (and if existing, convert) diffusion data
        diffusion_model = []
        for model in ['DSI', 'DTI', 'HARDI']:
            input_dir = os.path.join(self.base_directory, 'RAWDATA', model)
            if len(os.listdir(input_dir)) > 0:
                if convert_rawdata(self.base_directory, input_dir, model):
                    diffusion_available = True
                    diffusion_model.append(model)

        # Check for (and if existing, convert)  T1
        input_dir = os.path.join(self.base_directory, 'RAWDATA', 'T1')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'T1_orig'):
                t1_available = True

        # Check for (and if existing, convert)  T2
        input_dir = os.path.join(self.base_directory, 'RAWDATA', 'T2')
        if len(os.listdir(input_dir)) > 0:
            if convert_rawdata(self.base_directory, input_dir, 'T2_orig'):
                t2_available = True

        if diffusion_available:
            #project.stages['Diffusion'].config.imaging_model_choices = diffusion_model
            if t2_available:
                swap_and_reorient(
                    src_file=os.path.join(self.base_directory, 'NIFTI',
                                          'T2_orig.nii.gz'),
                    ref_file=os.path.join(self.base_directory, 'NIFTI',
                                          diffusion_model[0] + '.nii.gz'),
                    out_file=os.path.join(self.base_directory, 'NIFTI',
                                          'T2.nii.gz'))
            if t1_available:
                swap_and_reorient(
                    src_file=os.path.join(self.base_directory, 'NIFTI',
                                          'T1_orig.nii.gz'),
                    ref_file=os.path.join(self.base_directory, 'NIFTI',
                                          diffusion_model[0] + '.nii.gz'),
                    out_file=os.path.join(self.base_directory, 'NIFTI',
                                          'T1.nii.gz'))
                valid_inputs = True
                input_message = 'Inputs check finished successfully.\nDiffusion and morphological data available.'
            else:
                input_message = 'Error during inputs check.\nMorphological data (T1) not available.'
        elif t1_available:
            input_message = 'Error during inputs check. \nDiffusion data not available (DSI/DTI/HARDI).'
        else:
            input_message = 'Error during inputs check. No diffusion or morphological data available in folder ' + os.path.join(
                self.base_directory, 'RAWDATA') + '!'

        imaging_model = diffusion_model[0] if diffusion_model else ''

        if gui:
            input_notification = Check_Input_Notification(
                message=input_message,
                imaging_model_options=diffusion_model,
                imaging_model=imaging_model)
            input_notification.configure_traits()
            self.global_conf.imaging_model = input_notification.imaging_model
            diffusion_file = os.path.join(
                self.base_directory, 'NIFTI',
                input_notification.imaging_model + '.nii.gz')
            n_vol = nib.load(diffusion_file).shape[3]
            prep = self.stages['Preprocessing'].config
            if (prep.end_vol == 0 or prep.end_vol == prep.max_vol
                    or prep.end_vol >= n_vol - 1):
                prep.end_vol = n_vol - 1
            prep.max_vol = n_vol - 1
            self.stages['Registration'].config.imaging_model = \
                input_notification.imaging_model
            self.stages['Diffusion'].config.imaging_model = \
                input_notification.imaging_model
        else:
            print(input_message)
            self.global_conf.imaging_model = imaging_model
            diffusion_file = os.path.join(self.base_directory, 'NIFTI',
                                          imaging_model + '.nii.gz')
            n_vol = nib.load(diffusion_file).shape[3]
            prep = self.stages['Preprocessing'].config
            if (prep.end_vol == 0 or prep.end_vol == prep.max_vol
                    or prep.end_vol >= n_vol - 1):
                prep.end_vol = n_vol - 1
            prep.max_vol = n_vol - 1
            self.stages['Registration'].config.imaging_model = imaging_model
            self.stages['Diffusion'].config.imaging_model = imaging_model

        if t2_available:
            self.stages['Registration'].config.registration_mode_trait = [
                'Linear (FSL)', 'BBregister (FS)', 'Nonlinear (FSL)'
            ]

        self.fill_stages_outputs()

        return valid_inputs
Example No. 51
randomize.inputs.tfce = True
randomize.inputs.vox_p_values = True
randomize.inputs.num_perm = 200
#randomize.inputs.var_smooth = 5

randomize.run()
#%% Graph it
fig = nilearn.plotting.plot_stat_map('/media/Data/work/KPE_SPM/fslRandomize/randomize/randomise_tstat1.nii.gz', alpha=0.7 , cut_coords=(0, 45, -7))
fig.add_contours('/media/Data/work/custom_modelling_spm/randomize/randomise_tfce_corrp_tstat1.nii.gz', levels=[0.99], colors='w')
#%% opposite image run
fig = nilearn.plotting.plot_stat_map('/media/Data/work/custom_modelling_spm/neg/randomize/randomise_tstat1.nii.gz', alpha=0.7 , cut_coords=(0, 45, -7))
fig.add_contours('/media/Data/work/custom_modelling_spm/neg/randomize/randomise_tfce_corrp_tstat1.nii.gz', levels=[0.95], colors='w')
#%%
from nipype.caching import Memory
datadir = "/media/Data/work/"
mem = Memory(base_dir='/media/Data/work/custom_modelling_spm')
randomise = mem.cache(fsl.Randomise)
randomise_results = randomise(in_file=os.path.join(datadir, "custom_modelling_spm", "GainvsAmb_cope.nii.gz"),
                              mask=os.path.join(datadir, "custom_modelling_spm", "group_mask.nii.gz"),
                              one_sample_group_mean=True,
                              tfce=True,
                              vox_p_values=True,
                              num_perm=500)
print(randomise_results.outputs)

#%% Look at results
fig = nilearn.plotting.plot_stat_map(randomise_results.outputs.tstat_files[0], alpha=0.7)# , cut_coords=(-20, -80, 18))
fig.add_contours(randomise_results.outputs.t_corrected_p_files[0], levels=[0.95], colors='w')


#%% F contrasts
# Read the paradigm
import numpy as np
paradigm = np.recfromcsv(paradigm_file)
conditions = np.unique(paradigm['name']).tolist()
onsets = [paradigm['onset'][paradigm['name'] == condition].tolist()
          for condition in conditions]
durations = [paradigm['duration'][paradigm['name'] == condition].tolist()
             for condition in conditions]

# Create a memory context
from nipype.caching import Memory
current_directory = os.getcwd()
cache_directory = '/tmp'
os.chdir(cache_directory)
mem = Memory(cache_directory)

#  Generate SPM-specific Model
from nipype.algorithms.modelgen import SpecifySPMModel
from nipype.interfaces.base import Bunch
subject_info = Bunch(conditions=conditions, onsets=onsets, durations=durations)
tr = 2.5
modelspec = mem.cache(SpecifySPMModel)
out_modelspec = modelspec(
    input_units='secs',
    time_repetition=tr,
    high_pass_filter_cutoff=128,
    realignment_parameters=realignment_parameters,
    functional_runs=func_file,
    subject_info=subject_info)
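
# A hedged continuation (not part of the original snippet): the session_info
# produced above is what an SPM first-level design consumes next.
from nipype.interfaces import spm
level1design = mem.cache(spm.Level1Design)
out_level1design = level1design(
    timing_units='secs',
    interscan_interval=tr,
    session_info=out_modelspec.outputs.session_info,
    bases={'hrf': {'derivs': [0, 0]}})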