Example #1
    def _delete_orientation(self):
        """
        Delete orientation metadata. Garbage orientation metadata can lead to
        severe mis-registration trouble.

        """

        # prepare for smart caching
        cache_dir = os.path.join(self.output_dir, 'cache_dir')
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        mem = Memory(cachedir=cache_dir, verbose=5)
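        # mem.cache(f) returns a memoized version of f: results are persisted
        # under cache_dir, so re-runs with unchanged inputs are read from disk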

        # deleteorient for func
        for attr in ['n_sessions', 'session_output_dirs']:
            if getattr(self, attr) is None:
                warnings.warn("'%s' attribute is None; skipping" % attr)
                break
        else:
            self.func = [mem.cache(delete_orientation)(
                self.func[sess], self.session_output_dirs[sess])
                         for sess in range(self.n_sessions)]

        # deleteorient for anat
        if self.anat is not None:
            self.anat = mem.cache(delete_orientation)(
                self.anat, self.anat_output_dir)
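
For reference, a minimal sketch of what an orientation-wiping helper like
delete_orientation could look like (an assumption, not pypreprocess's actual
implementation), in the same Python 2 / fsl5.0- command style as the snippet
above:

    import os
    import shutil
    import commands  # Python 2 stdlib, as used elsewhere in these snippets

    def delete_orientation_sketch(in_file, output_dir):
        """Copy the image into output_dir, then wipe its orientation
        metadata with FSL's fslorient tool."""
        out_file = os.path.join(output_dir, os.path.basename(in_file))
        shutil.copy(in_file, out_file)
        print commands.getoutput(
            "fsl5.0-fslorient -deleteorient %s" % out_file)
        return out_file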
Example #2
    def _niigz2nii(self):
        """
        Convert .nii.gz to .nii (crucial for SPM).

        """
        cache_dir = os.path.join(self.scratch, 'cache_dir')
        mem = Memory(cache_dir, verbose=100)
        self._sanitize_session_output_dirs()
        if None not in [self.func, self.n_sessions, self.session_output_dirs]:
            self.func = [mem.cache(do_niigz2nii)(
                self.func[sess], output_dir=self.session_output_dirs[sess])
                         for sess in range(self.n_sessions)]
        if self.anat is not None:
            self.anat = mem.cache(do_niigz2nii)(
                self.anat, output_dir=self.anat_output_dir)
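
A plausible core for do_niigz2nii (a hedged sketch: SPM cannot read gzipped
NIfTI, so the conversion is just a decompressing re-save via nibabel, whose
output format follows the filename extension):

    import os
    import nibabel

    def niigz2nii_sketch(in_file, output_dir):
        """Re-save a .nii.gz image as uncompressed .nii for SPM."""
        out_file = os.path.join(
            output_dir, os.path.basename(in_file).replace('.nii.gz', '.nii'))
        nibabel.save(nibabel.load(in_file), out_file)
        return out_file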
Example #3
    def _niigz2nii(self):
        """
        Convert .nii.gz to .nii (crucial for SPM).

        """

        cache_dir = os.path.join(self.scratch, 'cache_dir')
        mem = Memory(cache_dir, verbose=100)

        self.func = [
            mem.cache(do_niigz2nii)(self.func[sess],
                                    output_dir=self.session_output_dirs[sess])
            for sess in xrange(self.n_sessions)
        ]

        if self.anat is not None:
            self.anat = mem.cache(do_niigz2nii)(
                self.anat, output_dir=self.anat_output_dir)
Example #4
    def _delete_orientation(self):
        """
        Delete orientation metadata. Garbage orientation metadata can lead to
        severe mis-registration trouble.

        """

        # prepare for smart caching
        cache_dir = os.path.join(self.output_dir, 'cache_dir')
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        mem = Memory(cachedir=cache_dir, verbose=5)

        # deleteorient for func
        self.func = [mem.cache(delete_orientation)(
                self.func[sess],
                self.session_output_dirs[sess])
                     for sess in xrange(self.n_sessions)]

        # deleteorient for anat
        if self.anat is not None:
            self.anat = mem.cache(delete_orientation)(
                self.anat, self.anat_output_dir)
Example #5
    def _delete_orientation(self):
        """
        Delete orientation metadata. Garbage orientation metadata can lead to
        severe mis-registration trouble.

        """

        # prepare for smart caching
        cache_dir = os.path.join(self.output_dir, 'cache_dir')
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        mem = Memory(cachedir=cache_dir, verbose=5)

        # deleteorient for func
        self.func = [
            mem.cache(delete_orientation)(self.func[sess],
                                          self.session_output_dirs[sess])
            for sess in xrange(self.n_sessions)
        ]

        # deleteorient for anat
        if self.anat is not None:
            self.anat = mem.cache(delete_orientation)(self.anat,
                                                      self.anat_output_dir)
Example #6
def do_subject_preproc(subject_id,
                       output_dir,
                       func,
                       anat,
                       do_bet=True,
                       do_mc=True,
                       do_coreg=True,
                       do_normalize=True,
                       cmd_prefix="fsl5.0-",
                       **kwargs
                       ):
    """
    Preprocesses subject data using FSL.

    Parameters
    ----------

    """

    output = {'func': func,
              'anat': anat
              }

    # output dir
    subject_output_dir = os.path.join(output_dir, subject_id)
    if not os.path.exists(subject_output_dir):
        os.makedirs(subject_output_dir)

    # prepare for smart-caching
    cache_dir = os.path.join(output_dir, "cache_dir")
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    nipype_mem = NipypeMemory(base_dir=cache_dir)
    joblib_mem = JoblibMemory(cache_dir, verbose=100)

    # sanitize input files
    if not isinstance(output['func'], basestring):
        output['func'] = joblib_mem.cache(do_fsl_merge)(
            func, subject_output_dir, output_prefix='Merged',
            cmd_prefix=cmd_prefix)

    ######################
    #  Brain Extraction
    ######################
    if do_bet:
        if not fsl.BET._cmd.startswith("fsl"):
            fsl.BET._cmd = cmd_prefix + fsl.BET._cmd

        bet = nipype_mem.cache(fsl.BET)
        bet_results = bet(in_file=output['anat'])

        output['anat'] = bet_results.outputs.out_file

    #######################
    #  Motion correction
    #######################
    if do_mc:
        if not fsl.MCFLIRT._cmd.startswith("fsl"):
            fsl.MCFLIRT._cmd = cmd_prefix + fsl.MCFLIRT._cmd

        mcflirt = nipype_mem.cache(fsl.MCFLIRT)
        mcflirt_results = mcflirt(in_file=output['func'],
                                  cost='mutualinfo',
                                  save_mats=True,  # save mc matrices
                                  save_plots=True  # save mc params
                                  )

        output['motion_parameters'] = mcflirt_results.outputs.par_file
        output['motion_matrices'] = mcflirt_results.outputs.mat_file
        output['func'] = mcflirt_results.outputs.out_file

    ###################
    # Coregistration
    ###################
    if do_coreg:
        if not fsl.FLIRT._cmd.startswith("fsl"):
            fsl.FLIRT._cmd = cmd_prefix + fsl.FLIRT._cmd

        flirt1 = nipype_mem.cache(fsl.FLIRT)
        flirt1_results = flirt1(in_file=output['func'],
                                reference=output['anat']
                                )

        if not do_normalize:
            output['func'] = flirt1_results.outputs.out_file

    ##########################
    # Spatial normalization
    ##########################
    if do_normalize:
        if not fsl.FLIRT._cmd.startswith("fsl"):
            fsl.FLIRT._cmd = cmd_prefix + fsl.FLIRT._cmd

        # T1 normalization
        flirt2 = nipype_mem.cache(fsl.FLIRT)
        flirt2_results = flirt2(in_file=output['anat'],
                                reference=FSL_T1_TEMPLATE)

        output['anat'] = flirt2_results.outputs.out_file

        # concatenate 'func -> anat' and 'anat -> standard space'
        # transformation matrices to obtain the 'func -> standard space'
        # transformation matrix
        if do_coreg:
            if not fsl.ConvertXFM._cmd.startswith("fsl"):
                fsl.ConvertXFM._cmd = cmd_prefix + fsl.ConvertXFM._cmd

            convertxfm = nipype_mem.cache(fsl.ConvertXFM)
            convertxfm_results = convertxfm(
                in_file=flirt1_results.outputs.out_matrix_file,
                in_file2=flirt2_results.outputs.out_matrix_file,
                concat_xfm=True
                )

            # warp func data into standard space by applying the
            # 'func -> standard space' transformation matrix (this step
            # needs the concatenated matrix, hence lives under do_coreg)
            if not fsl.ApplyXfm._cmd.startswith("fsl"):
                fsl.ApplyXfm._cmd = cmd_prefix + fsl.ApplyXfm._cmd

            applyxfm = nipype_mem.cache(fsl.ApplyXfm)
            applyxfm_results = applyxfm(
                in_file=output['func'],
                in_matrix_file=convertxfm_results.outputs.out_file,
                reference=FSL_T1_TEMPLATE
                )

            output['func'] = applyxfm_results.outputs.out_file

    return output
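
A hypothetical invocation of the above (the paths and subject id are
placeholders):

    results = do_subject_preproc('sub001', '/tmp/fsl_preproc',
                                 func='/data/sub001/func.nii.gz',
                                 anat='/data/sub001/anat.nii.gz')
    print results['func']  # path to the preprocessed 4D BOLD image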
Example #7
def do_subject_glm(subject_data):
    """FE analysis for a single subject."""
    subject_id = subject_data['subject_id']
    output_dir = subject_data["output_dir"]
    func_files = subject_data['func']
    anat = subject_data['anat']
    onset_files = subject_data['onset']
    # subject_id = os.path.basename(subject_dir)
    # subject_output_dir = os.path.join(output_dir, subject_id)
    mem = Memory(os.path.join(output_dir, "cache"))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # glob files: anat, session func files, session onset files
    # anat = glob.glob(os.path.join(subject_dir, anat_wildcard))
    # assert len(anat) == 1
    # anat = anat[0]
    # onset_files = sorted([glob.glob(os.path.join(subject_dir, session))[0]
    #                       for session in session_onset_wildcards])
    # func_files = sorted([sorted(glob.glob(os.path.join(subject_dir, session)))
    #                      for session in session_func_wildcards])

    ### Preprocess data #######################################################
    if False:  # preprocessing disabled in this snippet
        subject_data = mem.cache(do_subject_preproc)(
            dict(func=func_files, anat=anat, output_dir=output_dir))
        func_files = subject_data['func']
        anat = subject_data['anat']

        # reslice func images
        func_files = [mem.cache(reslice_vols)(
                sess_func,
                target_affine=nibabel.load(sess_func[0]).get_affine())
                  for sess_func in func_files]

    ### GLM: loop on (session_bold, onset_file) pairs over the various sessions
    design_matrices = []
    for session, (func_file, onset_file) in enumerate(zip(func_files,
                                                          onset_files)):
        if isinstance(func_file, str):
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        elif len(func_file) == 1:
            func_file = func_file[0]
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        else:
            n_scans = len(func_file)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        conditions, onsets, durations, amplitudes = parse_onset_file(
            onset_file)
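        # onsets/durations from the onset file appear to be in scan (TR)
        # units, hence the scaling by tr below to get seconds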
        onsets *= tr
        durations *= tr
        paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                                 duration=durations, amplitude=amplitudes)
        design_matrices.append(make_dmtx(
                frametimes,
                paradigm, hrf_model=hrf_model,
                drift_model=drift_model, hfcut=hfcut))

    # specify contrasts
    n_columns = len(design_matrices[0].names)
    contrasts = {}
    for i in xrange(paradigm.n_conditions):
        contrasts[design_matrices[0].names[2 * i]] = np.eye(n_columns)[2 * i]
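    # NB: with hrf_model="Canonical with Derivative", each condition occupies
    # two adjacent design columns (canonical regressor at index 2 * i, its
    # temporal derivative at 2 * i + 1), so np.eye(n_columns)[2 * i] selects
    # the canonical column only, e.g. contrasts['faces'] == [1, 0, 0, ..., 0]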

    # more interesting contrasts
    contrasts['faces-scrambled'] = contrasts['faces'] - contrasts['scrambled']
    contrasts['scrambled-faces'] = -contrasts['faces-scrambled']
    contrasts['effects_of_interest'] = (contrasts['faces']
                                        + contrasts['scrambled'])

    # effects of interest F-test
    diff_contrasts = []
    for i in xrange(paradigm.n_conditions - 1):
        a = contrasts[design_matrices[0].names[2 * i]]
        b = contrasts[design_matrices[0].names[2 * (i + 1)]]
        diff_contrasts.append(a - b)
    contrasts["diff"] = diff_contrasts

    # fit GLM
    print 'Fitting a GLM (this takes time)...'
    fmri_glm = FMRILinearModel([nibabel.concat_images(sess_func,
                                                      check_affines=False)
                                for sess_func in func_files],
                               [design_matrix.matrix
                                for design_matrix in design_matrices],
                               mask='compute'
                               )
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(output_dir, "mask.nii.gz")

    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        # a 1D contrast vector defines a t-contrast; a 2D array of stacked
        # vectors (like the "diff" F-test above) defines an F-contrast
        contrast_type = "F" if np.ndim(contrast_val) > 1 else "t"
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * 2,
            con_id=contrast_id,
            contrast_type=contrast_type,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True
            )

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                     [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(
                output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask
Example #8
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask


if __name__ == "__maih__":
    mem = Memory(os.path.join(output_dir, "cache"))
    first_level_glms = map(mem.cache(do_subject_glm), subject_dirs)

    # plot stats (per subject)
    import matplotlib.pyplot as plt
    import nipy.labs.viz as viz
    all_masks = []
    all_effects_maps = []
    for (subject_id, anat, effects_maps, z_maps,
         contrasts, mask) in first_level_glms:
        all_masks.append(mask)
        anat_img = nibabel.load(anat)
        z_map = nibabel.load(z_maps.values()[0])
        all_effects_maps.append(effects_maps)
        for contrast_id, z_map in z_maps.iteritems():
            z_map = nibabel.load(z_map)
            # assumed continuation: overlay each z-map on the subject's anat
            # via the nipy.labs.viz API imported above (the threshold is
            # illustrative)
            viz.plot_map(z_map.get_data(), z_map.get_affine(),
                         anat=anat_img.get_data(),
                         anat_affine=anat_img.get_affine(),
                         slicer='z', threshold=2.3,
                         title="%s: %s" % (subject_id, contrast_id))
    plt.show()
def _do_fmri_distortion_correction(
        subject_data,
        # I'm unsure of the readout time,
        # but this is constant across both PE
        # directions and so can be scaled to 1
        # (or any other nonzero float)
        protocol="MOTOR",
        readout_time=.01392,
        realign=True,
        coregister=True,
        coreg_func_to_anat=True,
        dc=True,
        segment=False,
        normalize=False,
        func_write_voxel_sizes=None,
        anat_write_voxel_sizes=None,
        report=False,
        **kwargs):
    """
    Function to undistort task fMRI data for a given HCP subject.

    """

    directions = ['LR', 'RL']

    subject_data.sanitize()

    if dc:
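        # topup's --datain ("acquisition parameters") file has one row per
        # phase-encoding direction, formatted as [x y z total_readout_time];
        # the LR and RL runs encode along opposite signs of x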
        acq_params = [[1, 0, 0, readout_time], [-1, 0, 0, readout_time]]
        acq_params_file = os.path.join(subject_data.output_dir,
                                       "b0_acquisition_params.txt")
        np.savetxt(acq_params_file, acq_params, fmt='%f')

        fieldmap_files = [
            os.path.join(
                os.path.dirname(subject_data.func[sess]),
                "%s_3T_SpinEchoFieldMap_%s.nii.gz" %
                (subject_data.subject_id, directions[sess]))
            for sess in xrange(subject_data.n_sessions)
        ]
        sbref_files = [
            sess_func.replace(".nii", "_SBRef.nii")
            for sess_func in subject_data.func
        ]

        # prepare for smart caching
        mem = Memory(os.path.join(subject_data.output_dir, "cache_dir"))

        for x in [fieldmap_files, sbref_files, subject_data.func]:
            assert len(x) == 2
            for y in x:
                assert os.path.isfile(y), y

        # fslroi
        zeroth_fieldmap_files = []
        for fieldmap_file in fieldmap_files:
            if not os.path.isfile(fieldmap_file):
                print "Can't find fieldmap file %s; skipping subject %s" % (
                    fieldmap_file, subject_data.subject_id)
                return

            # peel 0th volume of each fieldmap
            zeroth_fieldmap_file = os.path.join(
                subject_data.output_dir,
                "0th_%s" % os.path.basename(fieldmap_file))
            fslroi_cmd = "fsl5.0-fslroi %s %s 0 1" % (fieldmap_file,
                                                      zeroth_fieldmap_file)
            print "\r\nExecuting '%s' ..." % fslroi_cmd
            print mem.cache(commands.getoutput)(fslroi_cmd)

            zeroth_fieldmap_files.append(zeroth_fieldmap_file)

        # merge the 0th volume of both fieldmaps
        merged_zeroth_fieldmap_file = os.path.join(
            subject_data.output_dir, "merged_with_other_direction_%s" %
            (os.path.basename(zeroth_fieldmap_files[0])))
        fslmerge_cmd = "fsl5.0-fslmerge -t %s %s %s" % (
            merged_zeroth_fieldmap_file, zeroth_fieldmap_files[0],
            zeroth_fieldmap_files[1])
        print "\r\nExecuting '%s' ..." % fslmerge_cmd
        print mem.cache(commands.getoutput)(fslmerge_cmd)

        # do topup (learn distortion model)
        topup_results_basename = os.path.join(subject_data.output_dir,
                                              "topup_results")
        topup_cmd = ("fsl5.0-topup --imain=%s --datain=%s --config=b02b0.cnf "
                     "--out=%s" % (merged_zeroth_fieldmap_file,
                                   acq_params_file, topup_results_basename))
        print "\r\nExecuting '%s' ..." % topup_cmd
        print mem.cache(commands.getoutput)(topup_cmd)

        # apply the learned deformations to correct the distortion
        dc_fmri_files = []

        for sess in xrange(2):
            # merge SBRef + task BOLD for current PE direction
            assert len(subject_data.func) == 2, subject_data
            fourD_plus_sbref = os.path.join(
                subject_data.output_dir,
                "sbref_plus_" + os.path.basename(subject_data.func[sess]))
            fslmerge_cmd = "fsl5.0-fslmerge -t %s %s %s" % (
                fourD_plus_sbref, sbref_files[sess], subject_data.func[sess])
            print "\r\nExecuting '%s' ..." % fslmerge_cmd
            print mem.cache(commands.getoutput)(fslmerge_cmd)

            # realign task BOLD to SBRef
            sess_output_dir = subject_data.session_output_dirs[sess]
            rfourD_plus_sbref = _do_subject_realign(SubjectData(
                func=[fourD_plus_sbref],
                output_dir=subject_data.output_dir,
                n_sessions=1,
                session_output_dirs=[sess_output_dir]),
                                                    report=False).func[0]

            # apply topup to realigned images
            dc_rfourD_plus_sbref = os.path.join(
                subject_data.output_dir,
                "dc" + os.path.basename(rfourD_plus_sbref))
            applytopup_cmd = (
                "fsl5.0-applytopup --imain=%s --verbose --inindex=%i "
                "--topup=%s --out=%s --datain=%s --method=jac" %
                (rfourD_plus_sbref, sess + 1, topup_results_basename,
                 dc_rfourD_plus_sbref, acq_params_file))
            print "\r\nExecuting '%s' ..." % applytopup_cmd
            print mem.cache(commands.getoutput)(applytopup_cmd)

            # recover undistorted task BOLD
            dc_rfmri_file = dc_rfourD_plus_sbref.replace("sbref_plus_", "")
            fslroi_cmd = "fsl5.0-fslroi %s %s 1 -1" % (dc_rfourD_plus_sbref,
                                                       dc_rfmri_file)
            print "\r\nExecuting '%s' ..." % fslroi_cmd
            print mem.cache(commands.getoutput)(fslroi_cmd)

            # sanity trick: fslroi writes compressed output under the default
            # FSLOUTPUTTYPE=NIFTI_GZ, so point the filename at the real file
            if dc_rfmri_file.endswith(".nii"):
                dc_rfmri_file = dc_rfmri_file + ".gz"

            dc_fmri_files.append(dc_rfmri_file)

        subject_data.func = dc_fmri_files
        if isinstance(subject_data.func, basestring):
            subject_data.func = [subject_data.func]

    # continue preprocessing
    subject_data = do_subject_preproc(
        subject_data,
        realign=realign,
        coregister=coregister,
        coreg_anat_to_func=not coreg_func_to_anat,
        segment=True,
        normalize=False,
        report=report)

    # ok for GLM now
    return subject_data
def run_suject_level1_glm(
        subject_data,
        readout_time=.01392,  # seconds
        tr=.72,
        dc=True,
        hrf_model="Canonical with Derivative",
        drift_model="Cosine",
        hfcut=100,
        regress_motion=True,
        slicer='ortho',
        cut_coords=None,
        threshold=3.,
        cluster_th=15,
        normalize=True,
        fwhm=0.,
        protocol="MOTOR",
        func_write_voxel_sizes=None,
        anat_write_voxel_sizes=None,
        **other_preproc_kwargs):
    """
    Function to do preproc + analysis for a single HCP subject (task fMRI)

    """

    add_regs_files = None
    n_motion_regressions = 6
    subject_data.n_sessions = 2

    subject_data.tmp_output_dir = os.path.join(subject_data.output_dir, "tmp")
    if not os.path.exists(subject_data.tmp_output_dir):
        os.makedirs(subject_data.tmp_output_dir)

    if not os.path.exists(subject_data.output_dir):
        os.makedirs(subject_data.output_dir)

    mem = Memory(os.path.join(subject_data.output_dir, "cache_dir"),
                 verbose=100)

    # glob design files (.fsf)
    subject_data.design_files = [
        os.path.join(subject_data.data_dir,
                     ("MNINonLinear/Results/tfMRI_%s_%s/"
                      "tfMRI_%s_%s_hp200_s4_level1.fsf") %
                     (protocol, direction, protocol, direction))
        for direction in ['LR', 'RL']
    ]

    assert len(subject_data.design_files) == 2
    for df in subject_data.design_files:
        assert os.path.isfile(df), df

    if False:  # distortion-correction preproc disabled in this snippet
        subject_data = _do_fmri_distortion_correction(
            subject_data,
            dc=dc,
            fwhm=fwhm,
            readout_time=readout_time,
            **other_preproc_kwargs)

    # chronometry
    stats_start_time = pretty_time()

    # merged lists
    paradigms = []
    frametimes_list = []
    design_matrices = []
    # fmri_files = []
    n_scans = []
    # for direction, direction_index in zip(['LR', 'RL'], xrange(2)):
    for sess in xrange(subject_data.n_sessions):
        direction = ['LR', 'RL'][sess]
        # design file for this phase-encoding direction
        design_file = subject_data.design_files[sess]
        if not os.path.isfile(design_file):
            print "Can't find design file %s; skipping subject %s" % (
                design_file, subject_data.subject_id)
            return

        # read the experimental setup
        print "Reading experimental setup from %s ..." % design_file
        fsl_condition_ids, timing_files, fsl_contrast_ids, contrast_values = \
            read_fsl_design_file(design_file)
        print "... done.\r\n"

        # fix timing filenames
        timing_files = [
            tf.replace("EVs", "tfMRI_%s_%s/EVs" % (protocol, direction))
            for tf in timing_files
        ]

        # make design matrix
        print "Constructing design matrix for direction %s ..." % direction
        _n_scans = nibabel.load(subject_data.func[sess]).shape[-1]
        n_scans.append(_n_scans)
        add_regs_file = (add_regs_files[sess]
                         if add_regs_files is not None else None)
        design_matrix, paradigm, frametimes = make_dmtx_from_timing_files(
            timing_files,
            fsl_condition_ids,
            n_scans=_n_scans,
            tr=tr,
            hrf_model=hrf_model,
            drift_model=drift_model,
            hfcut=hfcut,
            add_regs_file=add_regs_file,
            add_reg_names=[
                'Translation along x axis', 'Translation along y axis',
                'Translation along z axis', 'Rotation along x axis',
                'Rotation along y axis', 'Rotation along z axis',
                'Differential Translation along x axis',
                'Differential Translation along y axis',
                'Differential Translation along z axis',
                'Differential Rotation along x axis',
                'Differential Rotation along y axis',
                'Differential Rotation along z axis'
            ][:n_motion_regressions] if add_regs_files is not None else None,
        )

        print "... done."
        paradigms.append(paradigm)
        frametimes_list.append(frametimes)
        design_matrices.append(design_matrix)

        # convert contrasts to dict
        contrasts = dict((
            contrast_id,
            # append zeros to end of contrast to match design
            np.hstack((
                contrast_value,
                np.zeros(len(design_matrix.names) - len(contrast_value)))))
                         for contrast_id, contrast_value in zip(
                             fsl_contrast_ids, contrast_values))
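        # (zero padding is needed because the design matrix carries extra
        # drift/confound columns that the .fsf contrast vectors do not cover)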

        # more interesting contrasts
        if protocol == 'MOTOR':
            contrasts['RH-LH'] = contrasts['RH'] - contrasts['LH']
            contrasts['LH-RH'] = -contrasts['RH-LH']
            contrasts['RF-LF'] = contrasts['RF'] - contrasts['LF']
            contrasts['LF-RF'] = -contrasts['RF-LF']
            contrasts['H'] = contrasts['RH'] + contrasts['LH']
            contrasts['F'] = contrasts['RF'] + contrasts['LF']
            contrasts['H-F'] = contrasts['H'] - contrasts['F']
            contrasts['F-H'] = -contrasts['H-F']

        contrasts = dict((k, v) for k, v in contrasts.iteritems() if "-" in k)

    # replicate contrasts across sessions
    contrasts = dict((cid, [cval] * 2) for cid, cval in contrasts.iteritems())

    cache_dir = os.path.join(subject_data.output_dir, 'cache_dir')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    nipype_mem = NipypeMemory(base_dir=cache_dir)

    if False:  # smoothing disabled in this snippet
        if np.sum(fwhm) > 0.:
            subject_data.func = nipype_mem.cache(spm.Smooth)(
                in_files=subject_data.func,
                fwhm=fwhm,
                ignore_exception=False,
            ).outputs.smoothed_files

    # fit GLM
    def tortoise(*args):
        print args
        print(
            'Fitting a "Fixed Effect" GLM for merging LR and RL '
            'phase-encoding directions for subject %s ...' %
            (subject_data.subject_id))
        fmri_glm = FMRILinearModel(
            subject_data.func,
            [design_matrix.matrix for design_matrix in design_matrices],
            mask='compute')
        fmri_glm.fit(do_scaling=True, model='ar1')
        print "... done.\r\n"

        # save computed mask
        mask_path = os.path.join(subject_data.output_dir, "mask.nii")
        print "Saving mask image to %s ..." % mask_path
        nibabel.save(fmri_glm.mask, mask_path)
        print "... done.\r\n"

        z_maps = {}
        effects_maps = {}
        map_dirs = {}
        for contrast_id, contrast_val in contrasts.iteritems():
            print "\tcontrast id: %s" % contrast_id
            z_map, eff_map = fmri_glm.contrast(contrast_val,
                                               con_id=contrast_id,
                                               output_z=True,
                                               output_effects=True)

            # store stat maps to disk
            for map_type, out_map in zip(['z', 'effects'], [z_map, eff_map]):
                map_dir = os.path.join(subject_data.output_dir,
                                       '%s_maps' % map_type)
                map_dirs[map_type] = map_dir
                if not os.path.exists(map_dir):
                    os.makedirs(map_dir)
                map_path = os.path.join(map_dir,
                                        '%s_%s.nii' % (map_type, contrast_id))
                print "\t\tWriting %s ..." % map_path

                nibabel.save(out_map, map_path)

                # collect zmaps for contrasts we're interested in
                if map_type == 'z':
                    z_maps[contrast_id] = map_path

                if map_type == 'effects':
                    effects_maps[contrast_id] = map_path

        return effects_maps, z_maps, mask_path, map_dirs

    # compute native-space maps and mask (the positional args passed to the
    # cached tortoise only serve as cache-invalidation keys)
    effects_maps, z_maps, mask_path, map_dirs = mem.cache(tortoise)(
        subject_data.func, subject_data.anat)

    # do stats report
    if False:  # stats reporting disabled in this snippet
        anat_img = nibabel.load(subject_data.anat)
        stats_report_filename = os.path.join(subject_data.output_dir,
                                             "reports", "report_stats.html")
        generate_subject_stats_report(
            stats_report_filename,
            contrasts,
            z_maps,
            nibabel.load(mask_path),
            anat=anat_img.get_data(),
            anat_affine=anat_img.get_affine(),
            threshold=threshold,
            cluster_th=cluster_th,
            slicer=slicer,
            cut_coords=cut_coords,
            design_matrices=design_matrices,
            subject_id=subject_data.subject_id,
            start_time=stats_start_time,
            title="GLM for subject %s" % subject_data.subject_id,

            # additional ``kwargs`` for more informative report
            TR=tr,
            n_scans=n_scans,
            hfcut=hfcut,
            drift_model=drift_model,
            hrf_model=hrf_model,
            paradigm={
                'LR': paradigms[0].__dict__,
                'RL': paradigms[1].__dict__
            },
            frametimes={
                'LR': frametimes_list[0],
                'RL': frametimes_list[1]
            },
            fwhm=fwhm)

        ProgressReport().finish_dir(subject_data.output_dir)
        print "\r\nStatistic report written to %s\r\n" % stats_report_filename

    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.iteritems())
    import json
    json.dump(
        dict((k, list(v)) for k, v in contrasts.iteritems()),
        open(os.path.join(subject_data.tmp_output_dir, "contrasts.json"), "w"))
    subject_data.contrasts = contrasts

    if normalize:
        assert hasattr(subject_data, "parameter_file")

        subject_data.native_effects_maps = effects_maps
        subject_data.native_z_maps = z_maps
        subject_data.native_mask_path = mask_path

        # warp effects maps and mask from native to standard space (MNI)
        apply_to_files = [
            v for _, v in subject_data.native_effects_maps.iteritems()
        ] + [subject_data.native_mask_path]
        tmp = nipype_mem.cache(spm.Normalize)(
            parameter_file=getattr(subject_data, "parameter_file"),
            apply_to_files=apply_to_files,
            write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
            write_voxel_sizes=func_write_voxel_sizes,
            write_wrap=[0, 0, 0],
            write_interp=1,
            jobtype='write',
            ignore_exception=False,
        ).outputs.normalized_files
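        # NB: normalized_files preserves the order of apply_to_files, so the
        # mask (appended last) comes back as tmp[-1] and the effects maps as
        # tmp[:-1]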

        subject_data.mask = hard_link(tmp[-1], subject_data.output_dir)
        subject_data.effects_maps = dict(
            zip(effects_maps.keys(), hard_link(tmp[:-1], map_dirs["effects"])))

        # warp anat image
        subject_data.anat = hard_link(
            nipype_mem.cache(spm.Normalize)(
                parameter_file=getattr(subject_data, "parameter_file"),
                apply_to_files=subject_data.anat,
                write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
                write_voxel_sizes=anat_write_voxel_sizes,
                write_wrap=[0, 0, 0],
                write_interp=1,
                jobtype='write',
                ignore_exception=False,
            ).outputs.normalized_files, subject_data.anat_output_dir)
    else:
        subject_data.mask = mask_path
        subject_data.effects_maps = effects_maps
        subject_data.z_maps = z_maps

    return subject_data
"""
:Synopsis: Step-by-step example usage of the purepython_preproc_pipeline module
:Author: DOHMATOB Elvis Dopgima <*****@*****.**>

"""

from pypreprocess.datasets import fetch_spm_auditory_data
from pypreprocess.slice_timing import fMRISTC
from pypreprocess.realign import MRIMotionCorrection
from pypreprocess.coreg import Coregister
from pypreprocess.external.joblib import Memory
import os

# assumed location of do_subject_preproc (the pure-python pipeline); adjust
# to wherever the function lives in your pypreprocess version
from pypreprocess.purepython_preproc_utils import do_subject_preproc

# create cache
mem = Memory('/tmp/stepwise_cache', verbose=100)

# fetch input data
sd = fetch_spm_auditory_data(
    os.path.join(os.environ['HOME'], "CODE/datasets/spm_auditory"))
n_sessions = 1  # this dataset has 1 session (i.e. 1 fMRI acquisition, or run)

do_subject_preproc(sd.__dict__,  # __dict__ is an attribute, not a callable
                   concat=False,
                   coregister=True,
                   stc=True,
                   cv_tc=True,
                   realign=True,
                   report=True)
"""
:Synopsis: Step-by-step example usage of the purepython_preproc_pipeline module
:Author: DOHMATOB Elvis Dopgima <*****@*****.**>

"""

from pypreprocess.datasets import fetch_spm_auditory_data
from pypreprocess.slice_timing import fMRISTC
from pypreprocess.realign import MRIMotionCorrection
from pypreprocess.coreg import Coregister
from pypreprocess.external.joblib import Memory
import os

# create cache
mem = Memory('/tmp/stepwise_cache', verbose=100)

# fetch input data
sd = fetch_spm_auditory_data(os.path.join(
        os.environ['HOME'],
        "CODE/datasets/spm_auditory"))
n_sessions = 1  # this dataset has 1 session (i.e. 1 fMRI acquisition, or run)

# output dict
output = {'func': [sd.func],  # one fMRI 4D datum per session
          'anat': sd.anat, 'n_sessions': n_sessions}

##################################
# Slice-Timing Correction (STC)
##################################
for sess_func, sess_id in zip(output['func'], xrange(n_sessions)):
    # session fit (assumed fMRISTC API: learn the slice-timing model)
    fmristc = mem.cache(fMRISTC().fit)(raw_data=sess_func)

    # session transform: apply the learned slice-timing correction
    output['func'][sess_id] = mem.cache(fmristc.transform)(sess_func)
Example #13
"""
Demo script for the coreg.py module (coregistration in pure python).

It demos coregistration on a variety of datasets including:
SPM single-subject auditory, NYU rest, ABIDE, etc.
"""

import os
import glob
import matplotlib.pyplot as plt
from pypreprocess.datasets import fetch_spm_auditory, fetch_nyu_rest
from pypreprocess.reporting.check_preprocessing import plot_registration
from pypreprocess.coreg import Coregister
from pypreprocess.external.joblib import Memory

# misc
mem = Memory("demos_cache")


def _run_demo(func, anat):
    # fit
    coreg = Coregister().fit(anat, func)

    # apply coreg
    VFk = coreg.transform(func)

    # QA: compare alignment before and after coregistration
    plot_registration(anat, func, title="before coreg")
    plot_registration(anat, VFk, title="after coreg")
    plt.show()
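
# a minimal driver for the demo (an assumption: fetch_spm_auditory is taken
# to download to a default location and return a Bunch with 'func' -- a list
# of 3D volume paths -- and 'anat' attributes)
if __name__ == '__main__':
    sd = fetch_spm_auditory()
    _run_demo(sd.func[0], sd.anat)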

def _do_fmri_distortion_correction(subject_data,
                                   # I'm unsure of the readout time,
                                   # but this is constant across both PE
                                   # directions and so can be scaled to 1
                                   # (or any other nonzero float)
                                   protocol="MOTOR",
                                   readout_time=.01392,
                                   realign=True,
                                   coregister=True,
                                   coreg_func_to_anat=True,
                                   dc=True,
                                   segment=False,
                                   normalize=False,
                                   func_write_voxel_sizes=None,
                                   anat_write_voxel_sizes=None,
                                   report=False,
                                   **kwargs
                                   ):
    """
    Function to undistort task fMRI data for a given HCP subject.

    """

    directions = ['LR', 'RL']

    subject_data.sanitize()

    if dc:
        acq_params = [[1, 0, 0, readout_time], [-1, 0, 0, readout_time]]
        acq_params_file = os.path.join(subject_data.output_dir,
                                       "b0_acquisition_params.txt")
        np.savetxt(acq_params_file, acq_params, fmt='%f')

        fieldmap_files = [
            os.path.join(
                os.path.dirname(subject_data.func[sess]),
                "%s_3T_SpinEchoFieldMap_%s.nii.gz" %
                (subject_data.subject_id, directions[sess]))
            for sess in xrange(subject_data.n_sessions)]
        sbref_files = [sess_func.replace(".nii", "_SBRef.nii")
                       for sess_func in subject_data.func]

        # prepare for smart caching
        mem = Memory(os.path.join(subject_data.output_dir, "cache_dir"))

        for x in [fieldmap_files, sbref_files, subject_data.func]:
            assert len(x) == 2
            for y in x:
                assert os.path.isfile(y), y

        # fslroi
        zeroth_fieldmap_files = []
        for fieldmap_file in fieldmap_files:
            if not os.path.isfile(fieldmap_file):
                print "Can't find fieldmap file %s; skipping subject %s" % (
                    fieldmap_file, subject_data.subject_id)
                return

            # peel 0th volume of each fieldmap
            zeroth_fieldmap_file = os.path.join(
                subject_data.output_dir, "0th_%s" % os.path.basename(
                    fieldmap_file))
            fslroi_cmd = "fsl5.0-fslroi %s %s 0 1" % (
                fieldmap_file, zeroth_fieldmap_file)
            print "\r\nExecuting '%s' ..." % fslroi_cmd
            print mem.cache(commands.getoutput)(fslroi_cmd)

            zeroth_fieldmap_files.append(zeroth_fieldmap_file)

        # merge the 0th volume of both fieldmaps
        merged_zeroth_fieldmap_file = os.path.join(
            subject_data.output_dir, "merged_with_other_direction_%s" % (
                os.path.basename(zeroth_fieldmap_files[0])))
        fslmerge_cmd = "fsl5.0-fslmerge -t %s %s %s" % (
            merged_zeroth_fieldmap_file, zeroth_fieldmap_files[0],
            zeroth_fieldmap_files[1])
        print "\r\nExecuting '%s' ..." % fslmerge_cmd
        print mem.cache(commands.getoutput)(fslmerge_cmd)

        # do topup (learn distortion model)
        topup_results_basename = os.path.join(subject_data.output_dir,
                                              "topup_results")
        topup_cmd = (
            "fsl5.0-topup --imain=%s --datain=%s --config=b02b0.cnf "
            "--out=%s" % (merged_zeroth_fieldmap_file, acq_params_file,
                          topup_results_basename))
        print "\r\nExecuting '%s' ..." % topup_cmd
        print mem.cache(commands.getoutput)(topup_cmd)

        # apply the learned deformations to correct the distortion
        dc_fmri_files = []

        for sess in xrange(2):
            # merge SBRef + task BOLD for current PE direction
            assert len(subject_data.func) == 2, subject_data
            fourD_plus_sbref = os.path.join(
                subject_data.output_dir, "sbref_plus_" + os.path.basename(
                    subject_data.func[sess]))
            fslmerge_cmd = "fsl5.0-fslmerge -t %s %s %s" % (
                fourD_plus_sbref, sbref_files[sess], subject_data.func[sess])
            print "\r\nExecuting '%s' ..." % fslmerge_cmd
            print mem.cache(commands.getoutput)(fslmerge_cmd)

            # realign task BOLD to SBRef
            sess_output_dir = subject_data.session_output_dirs[sess]
            rfourD_plus_sbref = _do_subject_realign(SubjectData(
                    func=[fourD_plus_sbref],
                    output_dir=subject_data.output_dir,
                    n_sessions=1, session_output_dirs=[sess_output_dir]),
                                                    report=False).func[0]

            # apply topup to realigned images
            dc_rfourD_plus_sbref = os.path.join(
                subject_data.output_dir, "dc" + os.path.basename(
                    rfourD_plus_sbref))
            applytopup_cmd = (
                "fsl5.0-applytopup --imain=%s --verbose --inindex=%i "
                "--topup=%s --out=%s --datain=%s --method=jac" % (
                    rfourD_plus_sbref, sess + 1, topup_results_basename,
                    dc_rfourD_plus_sbref, acq_params_file))
            print "\r\nExecuting '%s' ..." % applytopup_cmd
            print mem.cache(commands.getoutput)(applytopup_cmd)

            # recover undistorted task BOLD
            dc_rfmri_file = dc_rfourD_plus_sbref.replace("sbref_plus_", "")
            fslroi_cmd = "fsl5.0-fslroi %s %s 1 -1" % (
                dc_rfourD_plus_sbref, dc_rfmri_file)
            print "\r\nExecuting '%s' ..." % fslroi_cmd
            print mem.cache(commands.getoutput)(fslroi_cmd)

            # sanity trick: fslroi writes compressed output under the default
            # FSLOUTPUTTYPE=NIFTI_GZ, so point the filename at the real file
            if dc_rfmri_file.endswith(".nii"):
                dc_rfmri_file = dc_rfmri_file + ".gz"

            dc_fmri_files.append(dc_rfmri_file)

        subject_data.func = dc_fmri_files
        if isinstance(subject_data.func, basestring):
            subject_data.func = [subject_data.func]

    # continue preprocessing
    subject_data = do_subject_preproc(
        subject_data,
        realign=realign,
        coregister=coregister,
        coreg_anat_to_func=not coreg_func_to_anat,
        segment=True,
        normalize=False,
        report=report)

    # ok for GLM now
    return subject_data
def run_suject_level1_glm(subject_data,
                          readout_time=.01392,  # seconds
                          tr=.72,
                          dc=True,
                          hrf_model="Canonical with Derivative",
                          drift_model="Cosine",
                          hfcut=100,
                          regress_motion=True,
                          slicer='ortho',
                          cut_coords=None,
                          threshold=3.,
                          cluster_th=15,
                          normalize=True,
                          fwhm=0.,
                          protocol="MOTOR",
                          func_write_voxel_sizes=None,
                          anat_write_voxel_sizes=None,
                          **other_preproc_kwargs
                          ):
    """
    Function to do preproc + analysis for a single HCP subject (task fMRI)

    """

    add_regs_files = None
    n_motion_regressions = 6
    subject_data.n_sessions = 2

    subject_data.tmp_output_dir = os.path.join(subject_data.output_dir, "tmp")
    if not os.path.exists(subject_data.tmp_output_dir):
        os.makedirs(subject_data.tmp_output_dir)

    if not os.path.exists(subject_data.output_dir):
        os.makedirs(subject_data.output_dir)

    mem = Memory(os.path.join(subject_data.output_dir, "cache_dir"),
                 verbose=100)

    # glob design files (.fsf)
    subject_data.design_files = [
        os.path.join(subject_data.data_dir,
                     ("MNINonLinear/Results/tfMRI_%s_%s/"
                      "tfMRI_%s_%s_hp200_s4_level1.fsf") %
                     (protocol, direction, protocol, direction))
        for direction in ['LR', 'RL']]

    assert len(subject_data.design_files) == 2
    for df in subject_data.design_files:
        assert os.path.isfile(df), df

    if False:  # distortion-correction preproc disabled in this snippet
        subject_data = _do_fmri_distortion_correction(
            subject_data, dc=dc, fwhm=fwhm, readout_time=readout_time,
            **other_preproc_kwargs)

    # chronometry
    stats_start_time = pretty_time()

    # merged lists
    paradigms = []
    frametimes_list = []
    design_matrices = []
    # fmri_files = []
    n_scans = []
    # for direction, direction_index in zip(['LR', 'RL'], xrange(2)):
    for sess in xrange(subject_data.n_sessions):
        direction = ['LR', 'RL'][sess]
        # design file for this phase-encoding direction
        design_file = subject_data.design_files[sess]
        if not os.path.isfile(design_file):
            print "Can't find design file %s; skipping subject %s" % (
                design_file, subject_data.subject_id)
            return

        # read the experimental setup
        print "Reading experimental setup from %s ..." % design_file
        fsl_condition_ids, timing_files, fsl_contrast_ids, contrast_values = \
            read_fsl_design_file(design_file)
        print "... done.\r\n"

        # fix timing filenames
        timing_files = [tf.replace("EVs", "tfMRI_%s_%s/EVs" % (
                    protocol, direction)) for tf in timing_files]

        # make design matrix
        print "Constructing design matrix for direction %s ..." % direction
        _n_scans = nibabel.load(subject_data.func[sess]).shape[-1]
        n_scans.append(_n_scans)
        add_regs_file = (add_regs_files[sess]
                         if add_regs_files is not None else None)
        design_matrix, paradigm, frametimes = make_dmtx_from_timing_files(
            timing_files, fsl_condition_ids, n_scans=_n_scans, tr=tr,
            hrf_model=hrf_model, drift_model=drift_model, hfcut=hfcut,
            add_regs_file=add_regs_file,
            add_reg_names=[
                'Translation along x axis',
                'Translation along y axis',
                'Translation along z axis',
                'Rotation along x axis',
                'Rotation along y axis',
                'Rotation along z axis',
                'Differential Translation along x axis',
                'Differential Translation along y axis',
                'Differential Translation along z axis',
                'Differential Rotation along x axis',
                'Differential Rotation along y axis',
                'Differential Rotation along z axis'
            ][:n_motion_regressions] if add_regs_files is not None else None,
            )

        print "... done."
        paradigms.append(paradigm)
        frametimes_list.append(frametimes)
        design_matrices.append(design_matrix)

        # convert contrasts to dict
        contrasts = dict((
            contrast_id,
            # append zeros to end of contrast to match design
            np.hstack((
                contrast_value,
                np.zeros(len(design_matrix.names) - len(contrast_value)))))
                         for contrast_id, contrast_value in zip(
                             fsl_contrast_ids, contrast_values))

        # more interesting contrasts
        if protocol == 'MOTOR':
            contrasts['RH-LH'] = contrasts['RH'] - contrasts['LH']
            contrasts['LH-RH'] = -contrasts['RH-LH']
            contrasts['RF-LF'] = contrasts['RF'] - contrasts['LF']
            contrasts['LF-RF'] = -contrasts['RF-LF']
            contrasts['H'] = contrasts['RH'] + contrasts['LH']
            contrasts['F'] = contrasts['RF'] + contrasts['LF']
            contrasts['H-F'] = contrasts['H'] - contrasts['F']
            contrasts['F-H'] = -contrasts['H-F']

        contrasts = dict((k, v) for k, v in contrasts.iteritems() if "-" in k)

    # replicate contrasts across sessions
    contrasts = dict((cid, [cval] * 2)
                     for cid, cval in contrasts.iteritems())

    cache_dir = os.path.join(subject_data.output_dir, 'cache_dir')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    nipype_mem = NipypeMemory(base_dir=cache_dir)

    if False:  # smoothing disabled in this snippet
        if np.sum(fwhm) > 0.:
            subject_data.func = nipype_mem.cache(spm.Smooth)(
                in_files=subject_data.func,
                fwhm=fwhm,
                ignore_exception=False,
                ).outputs.smoothed_files

    # fit GLM
    def tortoise(*args):
        print args
        print (
            'Fitting a "Fixed Effect" GLM for merging LR and RL '
            'phase-encoding directions for subject %s ...' % (
                subject_data.subject_id))
        fmri_glm = FMRILinearModel(subject_data.func,
                                   [design_matrix.matrix
                                    for design_matrix in design_matrices],
                                   mask='compute'
                                   )
        fmri_glm.fit(do_scaling=True, model='ar1')
        print "... done.\r\n"

        # save computed mask
        mask_path = os.path.join(subject_data.output_dir, "mask.nii")
        print "Saving mask image to %s ..." % mask_path
        nibabel.save(fmri_glm.mask, mask_path)
        print "... done.\r\n"

        z_maps = {}
        effects_maps = {}
        map_dirs = {}
        for contrast_id, contrast_val in contrasts.iteritems():
            print "\tcontrast id: %s" % contrast_id
            z_map, eff_map = fmri_glm.contrast(
                contrast_val,
                con_id=contrast_id,
                output_z=True,
                output_effects=True
                )

            # store stat maps to disk
            for map_type, out_map in zip(['z', 'effects'],
                                         [z_map, eff_map]):
                map_dir = os.path.join(
                    subject_data.output_dir, '%s_maps' % map_type)
                map_dirs[map_type] = map_dir
                if not os.path.exists(map_dir):
                    os.makedirs(map_dir)
                map_path = os.path.join(
                    map_dir, '%s_%s.nii' % (map_type, contrast_id))
                print "\t\tWriting %s ..." % map_path

                nibabel.save(out_map, map_path)

                # collect zmaps for contrasts we're interested in
                if map_type == 'z':
                    z_maps[contrast_id] = map_path

                if map_type == 'effects':
                    effects_maps[contrast_id] = map_path

        return effects_maps, z_maps, mask_path, map_dirs

    # compute native-space maps and mask (the positional args passed to the
    # cached tortoise only serve as cache-invalidation keys)
    effects_maps, z_maps, mask_path, map_dirs = mem.cache(tortoise)(
        subject_data.func, subject_data.anat)

    # do stats report
    if False:  # stats reporting disabled in this snippet
        anat_img = nibabel.load(subject_data.anat)
        stats_report_filename = os.path.join(subject_data.output_dir,
                                             "reports",
                                             "report_stats.html")
        generate_subject_stats_report(
            stats_report_filename,
            contrasts,
            z_maps,
            nibabel.load(mask_path),
            anat=anat_img.get_data(),
            anat_affine=anat_img.get_affine(),
            threshold=threshold,
            cluster_th=cluster_th,
            slicer=slicer,
            cut_coords=cut_coords,
            design_matrices=design_matrices,
            subject_id=subject_data.subject_id,
            start_time=stats_start_time,
            title="GLM for subject %s" % subject_data.subject_id,

            # additional ``kwargs`` for more informative report
            TR=tr,
            n_scans=n_scans,
            hfcut=hfcut,
            drift_model=drift_model,
            hrf_model=hrf_model,
            paradigm={'LR': paradigms[0].__dict__,
                      'RL': paradigms[1].__dict__},
            frametimes={'LR': frametimes_list[0], 'RL': frametimes_list[1]},
            fwhm=fwhm
            )

        ProgressReport().finish_dir(subject_data.output_dir)
        print "\r\nStatistic report written to %s\r\n" % stats_report_filename

    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.iteritems())
    import json
    json.dump(dict((k, list(v)) for k, v in contrasts.iteritems()),
              open(os.path.join(subject_data.tmp_output_dir,
                                "contrasts.json"), "w"))
    subject_data.contrasts = contrasts

    if normalize:
        assert hasattr(subject_data, "parameter_file")

        subject_data.native_effects_maps = effects_maps
        subject_data.native_z_maps = z_maps
        subject_data.native_mask_path = mask_path

        # warp effects maps and mask from native to standard space (MNI)
        apply_to_files = [
            v for _, v in subject_data.native_effects_maps.iteritems()
            ] + [subject_data.native_mask_path]
        tmp = nipype_mem.cache(spm.Normalize)(
            parameter_file=getattr(subject_data, "parameter_file"),
            apply_to_files=apply_to_files,
            write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
            write_voxel_sizes=func_write_voxel_sizes,
            write_wrap=[0, 0, 0],
            write_interp=1,
            jobtype='write',
            ignore_exception=False,
            ).outputs.normalized_files

        subject_data.mask = hard_link(tmp[-1], subject_data.output_dir)
        subject_data.effects_maps = dict(zip(effects_maps.keys(), hard_link(
                    tmp[:-1], map_dirs["effects"])))

        # warp anat image
        subject_data.anat = hard_link(nipype_mem.cache(spm.Normalize)(
                parameter_file=getattr(subject_data, "parameter_file"),
                apply_to_files=subject_data.anat,
                write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
                write_voxel_sizes=anat_write_voxel_sizes,
                write_wrap=[0, 0, 0],
                write_interp=1,
                jobtype='write',
                ignore_exception=False,
                ).outputs.normalized_files, subject_data.anat_output_dir)
    else:
        subject_data.mask = mask_path
        subject_data.effects_maps = effects_maps
        subject_data.z_maps = z_maps

    return subject_data