# NOTE: assumed imports for this nipy/nipype-era pipeline; the project-specific
# helpers (read_fsl_design_file, make_dmtx_from_timing_files, hard_link,
# pretty_time, _do_fmri_distortion_correction) and the Memory, NipypeMemory,
# spm, and FMRILinearModel names are expected to come from the enclosing
# module -- adjust to your project layout.
import os
import json

import numpy as np
import nibabel


def run_suject_level1_glm(
        subject_data,
        readout_time=.01392,  # seconds
        tr=.72,
        dc=True,
        hrf_model="Canonical with Derivative",
        drift_model="Cosine",
        hfcut=100,
        regress_motion=True,
        slicer='ortho',
        cut_coords=None,
        threshold=3.,
        cluster_th=15,
        normalize=True,
        fwhm=0.,
        protocol="MOTOR",
        func_write_voxel_sizes=None,
        anat_write_voxel_sizes=None,
        **other_preproc_kwargs):
    """
    Function to do preproc + analysis for a single HCP subject (task fMRI)

    """

    add_regs_files = None
    n_motion_regressions = 6
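    # NB: motion regressors are not wired in yet (add_regs_files is None and
    # the regress_motion flag is unused); when an add_regs_file is supplied,
    # up to n_motion_regressions columns are appended to each design matrix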
    subject_data.n_sessions = 2

    if not os.path.exists(subject_data.output_dir):
        os.makedirs(subject_data.output_dir)

    subject_data.tmp_output_dir = os.path.join(subject_data.output_dir, "tmp")
    if not os.path.exists(subject_data.tmp_output_dir):
        os.makedirs(subject_data.tmp_output_dir)

    cache_dir = os.path.join(subject_data.output_dir, "cache_dir")
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    mem = Memory(cache_dir, verbose=100)

    # build paths to the per-direction FSL design files (.fsf)
    subject_data.design_files = [
        os.path.join(subject_data.data_dir,
                     ("MNINonLinear/Results/tfMRI_%s_%s/"
                      "tfMRI_%s_%s_hp200_s4_level1.fsf") %
                     (protocol, direction, protocol, direction))
        for direction in ['LR', 'RL']
    ]
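    # e.g. <data_dir>/MNINonLinear/Results/tfMRI_MOTOR_LR/
    #              tfMRI_MOTOR_LR_hp200_s4_level1.fsf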

    assert len(subject_data.design_files) == 2
    for df in subject_data.design_files:
        if not os.path.isfile(df):
            print("Can't find design file %s; skipping subject %s" % (
                df, subject_data.subject_id))
            return

    # distortion correction is currently disabled; flip this guard to
    # re-enable it
    if False:
        subject_data = _do_fmri_distortion_correction(
            subject_data,
            dc=dc,
            fwhm=fwhm,
            readout_time=readout_time,
            **other_preproc_kwargs)

    # chronometry
    stats_start_time = pretty_time()

    # per-session accumulators
    paradigms = []
    frametimes_list = []
    design_matrices = []
    n_scans = []
    for sess in range(subject_data.n_sessions):
        direction = ['LR', 'RL'][sess]
        design_file = subject_data.design_files[sess]

        # read the experimental setup
        print "Reading experimental setup from %s ..." % design_file
        fsl_condition_ids, timing_files, fsl_contrast_ids, contrast_values = \
            read_fsl_design_file(design_file)
        print "... done.\r\n"

        # fix timing filenames
        timing_files = [
            tf.replace("EVs", "tfMRI_%s_%s/EVs" % (protocol, direction))
            for tf in timing_files
        ]
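        # the .fsf files reference timing files as "EVs/<condition>.txt";
        # e.g. ".../EVs/rh.txt" becomes ".../tfMRI_MOTOR_LR/EVs/rh.txt"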

        # make design matrix
        print "Constructing design matrix for direction %s ..." % direction
        _n_scans = nibabel.load(subject_data.func[sess]).shape[-1]
        n_scans.append(_n_scans)
        add_regs_file = add_regs_files[
            sess] if not add_regs_files is None else None
        design_matrix, paradigm, frametimes = make_dmtx_from_timing_files(
            timing_files,
            fsl_condition_ids,
            n_scans=_n_scans,
            tr=tr,
            hrf_model=hrf_model,
            drift_model=drift_model,
            hfcut=hfcut,
            add_regs_file=add_regs_file,
            add_reg_names=[
                'Translation along x axis', 'Translation along y axis',
                'Translation along z axis', 'Rotation along x axis',
                'Rotation along y axis', 'Rotation along z axis',
                'Differential Translation along x axis',
                'Differential Translation along y axis',
                'Differential Translation along z axis',
                'Differential Rotation along x axis',
                'Differential Rotation along y axis',
                'Differential Rotation along z axis'
            ][:n_motion_regressions] if add_regs_files is not None else None,
        )

        print "... done."
        paradigms.append(paradigm)
        frametimes_list.append(frametimes)
        design_matrices.append(design_matrix)
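        # design_matrix.matrix is the (n_scans, n_regressors) array consumed
        # by FMRILinearModel below; design_matrix.names labels its columns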

        # convert contrasts to a dict, padding each FSL contrast vector with
        # zeros so its length matches the number of design-matrix columns
        contrasts = dict(
            (contrast_id,
             np.hstack((contrast_value,
                        np.zeros(len(design_matrix.names)
                                 - len(contrast_value)))))
            for contrast_id, contrast_value in zip(fsl_contrast_ids,
                                                   contrast_values))
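        # e.g. an FSL contrast [1, -1] over two task regressors is padded to
        # [1, -1, 0, ..., 0] across the drift (and derivative) columns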

        # more interesting contrasts
        if protocol == 'MOTOR':
            contrasts['RH-LH'] = contrasts['RH'] - contrasts['LH']
            contrasts['LH-RH'] = -contrasts['RH-LH']
            contrasts['RF-LF'] = contrasts['RF'] - contrasts['LF']
            contrasts['LF-RF'] = -contrasts['RF-LF']
            contrasts['H'] = contrasts['RH'] + contrasts['LH']
            contrasts['F'] = contrasts['RF'] + contrasts['LF']
            contrasts['H-F'] = contrasts['H'] - contrasts['F']
            contrasts['F-H'] = -contrasts['H-F']

        # keep only differential ("A-B") contrasts
        contrasts = dict((k, v) for k, v in contrasts.items() if "-" in k)

    # replicate each contrast across the two sessions: the same vector is
    # applied to the LR and RL runs in the fixed-effects fit
    contrasts = dict((cid, [cval] * 2) for cid, cval in contrasts.items())

    nipype_mem = NipypeMemory(base_dir=cache_dir)

    # smoothing is currently disabled; flip this guard to re-enable it
    if False:
        if np.sum(fwhm) > 0.:
            subject_data.func = nipype_mem.cache(spm.Smooth)(
                in_files=subject_data.func,
                fwhm=fwhm,
                ignore_exception=False,
            ).outputs.smoothed_files

    # fit a multi-session ("fixed effects") GLM; the positional args of
    # tortoise() are only used to key the joblib cache on func/anat -- they
    # are never read inside the function body
    def tortoise(*args):
        print(args)
        print('Fitting a "Fixed Effect" GLM for merging LR and RL '
              'phase-encoding directions for subject %s ...' %
              subject_data.subject_id)
        fmri_glm = FMRILinearModel(
            subject_data.func,
            [design_matrix.matrix for design_matrix in design_matrices],
            mask='compute')
        fmri_glm.fit(do_scaling=True, model='ar1')
        print "... done.\r\n"

        # save computed mask
        mask_path = os.path.join(subject_data.output_dir, "mask.nii")
        print "Saving mask image to %s ..." % mask_path
        nibabel.save(fmri_glm.mask, mask_path)
        print "... done.\r\n"

        z_maps = {}
        effects_maps = {}
        map_dirs = {}
        try:
            for contrast_id, contrast_val in contrasts.items():
                print("\tcontrast id: %s" % contrast_id)
                z_map, eff_map = fmri_glm.contrast(contrast_val,
                                                   con_id=contrast_id,
                                                   output_z=True,
                                                   output_effects=True)

                # store stat maps to disk
                for map_type, out_map in zip(['z', 'effects'],
                                             [z_map, eff_map]):
                    map_dir = os.path.join(subject_data.output_dir,
                                           '%s_maps' % map_type)
                    map_dirs[map_type] = map_dir
                    if not os.path.exists(map_dir):
                        os.makedirs(map_dir)
                    map_path = os.path.join(
                        map_dir, '%s_%s.nii' % (map_type, contrast_id))
                    print "\t\tWriting %s ..." % map_path

                    nibabel.save(out_map, map_path)

                    # collect zmaps for contrasts we're interested in
                    if map_type == 'z':
                        z_maps[contrast_id] = map_path

                    if map_type == 'effects':
                        effects_maps[contrast_id] = map_path

            return effects_maps, z_maps, mask_path, map_dirs
        except Exception:
            return None

    # compute native-space maps and mask
    results = mem.cache(tortoise)(subject_data.func, subject_data.anat)
    if results is None:
        return None
    effects_maps, z_maps, mask_path, map_dirs = results

    # collapse the per-session duplicates back to one vector per contrast
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.items())
    with open(os.path.join(subject_data.tmp_output_dir,
                           "contrasts.json"), "w") as fp:
        json.dump(dict((k, list(v)) for k, v in contrasts.items()), fp)
    subject_data.contrasts = contrasts

    if normalize:
        assert hasattr(subject_data, "parameter_file")

        subject_data.native_effects_maps = effects_maps
        subject_data.native_z_maps = z_maps
        subject_data.native_mask_path = mask_path

        # warp effects maps and mask from native to standard space (MNI)
        apply_to_files = list(
            subject_data.native_effects_maps.values()
        ) + [subject_data.native_mask_path]
        tmp = nipype_mem.cache(spm.Normalize)(
            parameter_file=getattr(subject_data, "parameter_file"),
            apply_to_files=apply_to_files,
            write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
            write_voxel_sizes=func_write_voxel_sizes,
            write_wrap=[0, 0, 0],
            write_interp=1,
            jobtype='write',
            ignore_exception=False,
        ).outputs.normalized_files

        subject_data.mask = hard_link(tmp[-1], subject_data.output_dir)
        subject_data.effects_maps = dict(
            zip(effects_maps.keys(), hard_link(tmp[:-1], map_dirs["effects"])))

        # warp anat image
        subject_data.anat = hard_link(
            nipype_mem.cache(spm.Normalize)(
                parameter_file=getattr(subject_data, "parameter_file"),
                apply_to_files=subject_data.anat,
                write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
                write_voxel_sizes=anat_write_voxel_sizes,
                write_wrap=[0, 0, 0],
                write_interp=1,
                jobtype='write',
                ignore_exception=False,
            ).outputs.normalized_files, subject_data.anat_output_dir)
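        # note: z-maps are not warped here; they remain available in native
        # space via subject_data.native_z_maps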
    else:
        subject_data.mask = mask_path
        subject_data.effects_maps = effects_maps
        subject_data.z_maps = z_maps

    return subject_data
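

# Minimal usage sketch (hypothetical): the SubjectData import path, directory
# layout, and file names below are illustrative assumptions, not part of this
# module; normalize=False avoids the need for an SPM parameter_file.
if __name__ == "__main__":
    from pypreprocess.subject_data import SubjectData  # assumed location

    subject_data = SubjectData(
        subject_id="100307",  # any HCP subject id
        data_dir="/data/HCP/100307",
        output_dir="/data/HCP_results/100307",
        func=["/data/HCP/100307/MNINonLinear/Results/tfMRI_MOTOR_%s/"
              "tfMRI_MOTOR_%s.nii.gz" % (d, d) for d in ("LR", "RL")],
        anat="/data/HCP/100307/T1w/T1w_acpc_dc_restore_brain.nii.gz")
    run_suject_level1_glm(subject_data, protocol="MOTOR", normalize=False)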