Example #1
def make_design_matrices(onsets,
                         n_scans,
                         tr,
                         motion=None,
                         hrf_model='canonical with derivative',
                         drift_model='cosine',
                         orthogonalize=None):
    design_matrices = []
    for i, onset in enumerate(onsets):
        if n_scans[i] == 0:
            design_matrices.append(None)
            continue  # nothing to build for an empty run
        onset = np.array(onset)
        labels = onset[:, 0]
        time = onset[:, 1].astype('float')
        duration = onset[:, 2].astype('float')
        amplitude = onset[:, 3].astype('float')

        if duration.sum() == 0:
            paradigm = EventRelatedParadigm(labels, time, amplitude)
        else:
            paradigm = BlockParadigm(labels, time, duration, amplitude)

        frametimes = np.linspace(0, (n_scans[i] - 1) * tr, n_scans[i])

        if motion is not None:
            add_regs = np.array(motion[i]).astype('float')
            add_reg_names = ['motion_%i' % r for r in range(add_regs.shape[1])]
            design_matrix = make_dmtx(frametimes,
                                      paradigm,
                                      hrf_model=hrf_model,
                                      drift_model=drift_model,
                                      add_regs=add_regs,
                                      add_reg_names=add_reg_names)
        else:
            design_matrix = make_dmtx(frametimes,
                                      paradigm,
                                      hrf_model=hrf_model,
                                      drift_model=drift_model)

        if orthogonalize is not None:
            if 'derivative' in hrf_model:
                raise Exception(
                    'Orthogonalization not supported with hrf derivative.')
            orth = orthogonalize[i]
            if orth is not None:
                for x, y in orth:
                    x_ = design_matrix.matrix[:, x]
                    y_ = design_matrix.matrix[:, y]
                    z = orthogonalize_vectors(x_, y_)
                    design_matrix.matrix[:, x] = z

        design_matrices.append(design_matrix.matrix)

    return design_matrices
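
The orthogonalize_vectors helper used above is not shown on this page. A
minimal sketch, assuming it simply residualizes the first regressor against
the second (one Gram-Schmidt step, so the result is orthogonal to y):

import numpy as np

def orthogonalize_vectors(x, y):
    # assumed behaviour: subtract from x its projection onto y;
    # y itself is left untouched
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    return x - (np.dot(x, y) / np.dot(y, y)) * y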
Example #3
def make_design(folder_name):
    files = [f for f in listdir(folder_name) if isfile(join(folder_name, f))]
    design_files = []
    for i, run_file in enumerate(files):
        name = "design-{}".format(i)
        tr = 0.2
        path = os.path.join(folder_name, run_file)
        img = nib.load(path)
        data = img.get_data()
        n_scans = data.shape[-1]
        frametimes = np.arange(0, n_scans * tr, tr)
        y = make_dmtx(frametimes,
                      paradigm=None,
                      hrf_model='canonical',
                      drift_model='cosine',
                      hfcut=128,
                      drift_order=1,
                      fir_delays=[0],
                      add_regs=None,
                      add_reg_names=None,
                      min_onset=0)

        np.savez(name, y.matrix)
        design_files.append(name)

    return design_files
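
With paradigm=None, as above, make_dmtx builds a confound-only design (drift
basis plus a constant). A quick standalone check of that behaviour, assuming
only numpy and nipy:

import numpy as np
from nipy.modalities.fmri.design_matrix import make_dmtx

n_scans, tr = 100, 0.2
frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
dm = make_dmtx(frametimes, paradigm=None, drift_model='cosine', hfcut=128)
print(dm.names)  # drift regressors plus the constant column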
Example #4
def make_design_matrices(events, n_scans, tr, hrf_model="canonical", drift_model="cosine", motion=None, orth=None):
    design_matrices = []
    n_sessions = len(n_scans)

    for i in range(n_sessions):
        onsets = events[i][0][:, 0]
        duration = events[i][0][:, 1]
        amplitude = events[i][0][:, 2]
        trials = events[i][1]
        order = np.argsort(onsets)

        # make a block or event paradigm depending on stimulus duration
        if duration.sum() == 0:
            paradigm = EventRelatedParadigm(trials[order], onsets[order], amplitude[order])
        else:
            paradigm = BlockParadigm(trials[order], onsets[order], duration[order], amplitude[order])

        frametimes = np.linspace(0, (n_scans[i] - 1) * tr, n_scans[i])

        if motion is not None:
            add_regs = np.array(motion[i]).astype("float")
            add_reg_names = ["motion_%i" % r for r in range(add_regs.shape[1])]

            design_matrix = make_dmtx(
                frametimes,
                paradigm,
                hrf_model=hrf_model,
                drift_model=drift_model,
                add_regs=add_regs,
                add_reg_names=add_reg_names,
            )
        else:
            design_matrix = make_dmtx(frametimes, paradigm, hrf_model=hrf_model, drift_model=drift_model)

        if orth is not None:
            session_orth = orth[i]
            if session_orth is not None:
                for x, y in session_orth:
                    reg = orthogonalize_regressors(design_matrix.matrix[:, x], design_matrix.matrix[:, y])
                    design_matrix.matrix[:, x] = reg

        design_matrices.append(design_matrix)

    return design_matrices
Example #5
def make_design_matrices(events, n_scans, tr, hrf_model='canonical',
                         drift_model='cosine', motion=None):

    design_matrices = []
    n_sessions = len(n_scans)

    for i in range(n_sessions):

        onsets = events[i][0][:, 0]
        duration = events[i][0][:, 1]
        amplitude = events[i][0][:, 2]
        cond_id = events[i][1]
        order = np.argsort(onsets)

        # make a block or event paradigm depending on stimulus duration
        if duration.sum() == 0:
            paradigm = EventRelatedParadigm(cond_id[order],
                                            onsets[order],
                                            amplitude[order])
        else:
            paradigm = BlockParadigm(cond_id[order], onsets[order],
                                     duration[order], amplitude[order])

        frametimes = np.linspace(0, (n_scans[i] - 1) * tr, n_scans[i])

        if motion is not None:
            add_regs = np.array(motion[i]).astype('float')
            add_reg_names = ['motion_%i' % r
                             for r in range(add_regs.shape[1])]

            design_matrix = make_dmtx(
                frametimes, paradigm, hrf_model=hrf_model,
                drift_model=drift_model,
                add_regs=add_regs, add_reg_names=add_reg_names)
        else:
            design_matrix = make_dmtx(
                frametimes, paradigm, hrf_model=hrf_model,
                drift_model=drift_model)

        design_matrices.append(design_matrix.matrix)

    return design_matrices
Example #6
def make_dmtx_from_timing_files(timing_files,
                                condition_ids=None,
                                frametimes=None,
                                n_scans=None,
                                tr=None,
                                add_regs_file=None,
                                add_reg_names=None,
                                **make_dmtx_kwargs):
    # make paradigm
    paradigm = make_paradigm_from_timing_files(timing_files,
                                               condition_ids=condition_ids)

    # make frametimes
    if frametimes is None:
        assert n_scans is not None, ("frametimes not specified, expecting a "
                                     "value for n_scans")
        assert tr is not None, ("frametimes not specified, expecting a "
                                "value for tr")
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    else:
        assert n_scans is None, ("frametimes specified, not expecting a "
                                 "value for n_scans")
        assert tr is None, ("frametimes specified, not expecting a "
                            "value for tr")

    # load additional regressors from file
    if add_regs_file is not None:
        if isinstance(add_regs_file, np.ndarray):
            add_regs = add_regs_file
        else:
            assert os.path.isfile(add_regs_file), (
                "add_regs_file %s doesn't exist" % add_regs_file)
            add_regs = np.loadtxt(add_regs_file)
        assert add_regs.ndim == 2, (
            "Bad add_regs_file: %s (must contain a 2D array, each column "
            "representing the values of a single regressor)" % add_regs_file)
        if add_reg_names is None:
            add_reg_names = [
                "R%i" % (col + 1) for col in xrange(add_regs.shape[-1])
            ]
        else:
            assert len(add_reg_names) == add_regs.shape[1], (
                "Expecting %i regressor names, got %i" %
                (add_regs.shape[1], len(add_reg_names)))

        make_dmtx_kwargs["add_reg_names"] = add_reg_names
        make_dmtx_kwargs["add_regs"] = add_regs

    # make design matrix
    design_matrix = make_dmtx(frametimes, paradigm, **make_dmtx_kwargs)

    # return output
    return design_matrix, paradigm, frametimes
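
A hypothetical call, assuming one FSL-style three-column timing file (onset,
duration, amplitude) per condition and a plain-text file of realignment
parameters; all file names below are placeholders:

dmtx, paradigm, frametimes = make_dmtx_from_timing_files(
    ['cond_faces.txt', 'cond_scrambled.txt'],  # placeholder paths
    condition_ids=['faces', 'scrambled'],
    n_scans=128, tr=2.,                  # frametimes derived from these two
    add_regs_file='rp_run1.txt',         # e.g. six realignment parameters
    hrf_model='canonical', drift_model='cosine')
print(dmtx.names)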
Example #8
def preprocess_encoding(ds, events, c, mp=None, design_kwargs=None):
    if isinstance(c, basestring):
        c = [c]
    from nipy.modalities.fmri.design_matrix import make_dmtx
    if design_kwargs is None:
        design_kwargs = {'hrf_model': 'canonical', 'drift_model': 'blank'}
    des_dict, rds = make_designmat(ds,
                                   events,
                                   time_attr='time_coords',
                                   condition_attr=c,
                                   design_kwargs=design_kwargs,
                                   regr_attrs=None)
    if mp is not None:
        des_dict['motion'] = make_dmtx(ds.sa['time_coords'].value,
                                       paradigm=None,
                                       add_regs=mp,
                                       drift_model='blank')

    des = make_parammat(des_dict, hrf='canonical', zscore=True)
    return rds, des
Example #9
def make_designmat(frametimes,
                   cond_ids,
                   onsets,
                   durations,
                   amplitudes=None,
                   design_kwargs=None,
                   constant=False,
                   logger=None):
    """
    Creates design matrix from TSV columns
    :param frametimes: time index (in s) of each TR
    :param cond_ids: condition ids. each unique string will become a regressor
    :param onsets: condition onsets
    :param durations: durations of trials
    :param amplitudes: amplitude of trials (default None)
    :param design_kwargs: additional arguments (motion parameters, HRF, etc.)
    :param constant: if False (the default), drop the constant column
    :param logger: logger instance
    :return: design matrix instance
    """
    if design_kwargs is None:
        design_kwargs = {}
    if "drift_model" not in design_kwargs.keys():
        design_kwargs["drift_model"] = "blank"

    from nipy.modalities.fmri.design_matrix import make_dmtx
    from nipy.modalities.fmri.experimental_paradigm import BlockParadigm

    write_to_logger("Creating design matrix...", logger)
    paradigm = BlockParadigm(con_id=cond_ids,
                             onset=onsets,
                             duration=durations,
                             amplitude=amplitudes)
    dm = make_dmtx(frametimes, paradigm, **design_kwargs)
    if not constant:
        import numpy as np
        dm.matrix = np.delete(dm.matrix, dm.names.index("constant"), axis=1)
        # remove in place: list.remove returns None, so don't rebind dm.names
        dm.names.remove("constant")
    return dm
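
A minimal sketch of a call with toy inputs; note that make_designmat calls
the module's write_to_logger helper unconditionally, so that must be
importable:

import numpy as np

n_scans, tr = 100, 2.
frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
dm = make_designmat(frametimes,
                    cond_ids=['face', 'house', 'face'],
                    onsets=[0., 30., 60.],
                    durations=[1., 1., 1.],
                    design_kwargs={'hrf_model': 'canonical'})
print(dm.names)  # one regressor per unique condition id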
Example #10

def _preprocess_and_analysis_subject(subject_data,
                                     do_normalize=False,
                                     fwhm=0.,
                                     slicer='z',
                                     cut_coords=6,
                                     threshold=3.,
                                     cluster_th=15
                                     ):
    """
    Preprocesses the subject and then fits (mass-univariate) GLM thereupon.

    """

    # sanitize run_ids: Sub14/BOLD/Run_02/fMR09029-0004-00010-000010-01.nii
    # is garbage, for example
    run_ids = range(9)
    if subject_data['subject_id'] == "Sub14":
        run_ids = [0] + range(2, 9)
        subject_data['func'] = [subject_data['func'][0]] + subject_data[
            'func'][2:]
        subject_data['session_id'] = [subject_data['session_id'][0]
                                      ] + subject_data['session_id'][2:]

    # sanitize subject output dir
    if 'output_dir' not in subject_data:
        subject_data['output_dir'] = os.path.join(
            output_dir, subject_data['subject_id'])

    # preprocess the data
    subject_data = do_subject_preproc(SubjectData(**subject_data),
                                      do_realign=True,
                                      do_coreg=True,
                                      do_report=False,
                                      do_cv_tc=False
                                      )
    assert subject_data.anat is not None

    # norm
    if do_normalize:
        subject_data = nipype_do_subject_preproc(
            subject_data,
            do_realign=False,
            do_coreg=False,
            do_segment=True,
            do_normalize=True,
            func_write_voxel_sizes=[3, 3, 3],
            anat_write_voxel_sizes=[2, 2, 2],
            fwhm=fwhm,
            hardlink_output=False,
            do_report=False
            )

    # chronometry
    stats_start_time = pretty_time()

    # to-be merged lists, one item per run
    paradigms = []
    frametimes_list = []
    design_matrices = []  # one design matrix per run
    list_of_contrast_dicts = []  # one dict per run
    n_scans = []
    for run_id in run_ids:
        _n_scans = len(subject_data.func[run_id])
        n_scans.append(_n_scans)

        # make paradigm
        paradigm = make_paradigm(getattr(subject_data, 'timing')[run_id])

        # make design matrix
        tr = 2.
        drift_model = 'Cosine'
        hrf_model = 'Canonical With Derivative'
        hfcut = 128.
        frametimes = np.linspace(0, (_n_scans - 1) * tr, _n_scans)
        design_matrix = make_dmtx(
            frametimes,
            paradigm, hrf_model=hrf_model,
            drift_model=drift_model, hfcut=hfcut,
            add_regs=np.loadtxt(getattr(subject_data,
                                        'realignment_parameters')[run_id]),
            add_reg_names=[
                'Translation along x axis',
                'Translation along y axis',
                'Translation along z axis',
                'Rotation along x axis',
                'Rotation along y axis',
                'Rotation along z axis'
                ]
            )

        # import matplotlib.pyplot as plt
        # design_matrix.show()
        # plt.show()

        paradigms.append(paradigm)
        design_matrices.append(design_matrix)
        frametimes_list.append(frametimes)

        # specify contrasts
        contrasts = {}
        n_columns = len(design_matrix.names)
        for i in xrange(paradigm.n_conditions):
            contrasts['%s' % design_matrix.names[2 * i]] = np.eye(
                n_columns)[2 * i]

        # more interesting contrasts"""
        contrasts['Famous-Unfamiliar'] = contrasts[
            'Famous'] - contrasts['Unfamiliar']
        contrasts['Unfamiliar-Famous'] = -contrasts['Famous-Unfamiliar']
        contrasts['Famous-Scrambled'] = contrasts[
            'Famous'] - contrasts['Scrambled']
        contrasts['Scrambled-Famous'] = -contrasts['Famous-Scrambled']
        contrasts['Unfamiliar-Scrambled'] = contrasts[
            'Unfamiliar'] - contrasts['Scrambled']
        contrasts['Scrambled-Unfamiliar'] = -contrasts['Unfamiliar-Scrambled']

        list_of_contrast_dicts.append(contrasts)

    # important maps
    z_maps = {}
    effects_maps = {}

    # fit GLM
    print('\r\nFitting a GLM (this takes time) ..')
    fmri_glm = FMRILinearModel([nibabel.concat_images(sess_func)
                                for sess_func in subject_data.func],
                               [design_matrix.matrix
                                for design_matrix in design_matrices],
                               mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    print "... done.\r\n"

    # save computed mask
    mask_path = os.path.join(subject_data.output_dir, "mask.nii.gz")

    print "Saving mask image to %s ..." % mask_path
    nibabel.save(fmri_glm.mask, mask_path)
    print "... done.\r\n"

    # replicate contrasts across runs
    contrasts = dict((cid, [contrast_dict[cid]
                            for contrast_dict in list_of_contrast_dicts])
                     for cid in contrasts)

    # compute effects
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, eff_map = fmri_glm.contrast(
            contrast_val,
            con_id=contrast_id,
            output_z=True,
            output_stat=False,
            output_effects=True,
            output_variance=False
            )

        # store stat maps to disk
        for map_type, out_map in zip(['z', 'effects'], [z_map, eff_map]):
            map_dir = os.path.join(
                subject_data.output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path

            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.iteritems())

    # do stats report
    stats_report_filename = os.path.join(getattr(subject_data,
                                                 'reports_output_dir',
                                                 subject_data.output_dir),
                                         "report_stats.html")
    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        threshold=threshold,
        cluster_th=cluster_th,
        slicer=slicer,
        cut_coords=cut_coords,
        anat=nibabel.load(subject_data.anat).get_data(),
        anat_affine=nibabel.load(subject_data.anat).get_affine(),
        design_matrices=design_matrices,
        subject_id=subject_data.subject_id,
        start_time=stats_start_time,
        title="GLM for subject %s" % subject_data.subject_id,

        # additional ``kwargs`` for more informative report
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        drift_model=drift_model,
        hrf_model=hrf_model,
        paradigm=dict(("Run_%02i" % (run_id + 1), paradigms[run_id].__dict__)
                      for run_id in run_ids),
        frametimes=dict(("Run_%02i" % (run_id + 1), frametimes_list[run_id])
                        for run_id in run_ids),
        # fwhm=fwhm
        )

    ProgressReport().finish_dir(subject_data.output_dir)
    print "\r\nStatistic report written to %s\r\n" % stats_report_filename

    return contrasts, effects_maps, z_maps, mask_path
Example #11
def do_subject_glm(subject_data):
    """FE analysis for a single subject."""
    subject_id = subject_data['subject_id']
    output_dir = subject_data["output_dir"]
    func_files = subject_data['func']
    anat = subject_data['anat']
    onset_files = subject_data['onset']
    tr = subject_data['TR']
    time_units = subject_data['time_units'].lower()
    assert time_units in ["seconds", "tr", "milliseconds"]
    drift_model = subject_data['drift_model']
    hrf_model = subject_data["hrf_model"]
    hfcut = subject_data["hfcut"]
    mem = Memory(os.path.join(output_dir, "cache"))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if 0:
        subject_data = mem.cache(do_subject_preproc)(dict(
            func=func_files, anat=anat, output_dir=output_dir))
        func_files = subject_data['func']
        anat = subject_data['anat']

        # reslice func images
        func_files = [
            mem.cache(reslice_vols)(sess_func,
                                    target_affine=nibabel.load(
                                        sess_func[0]).get_affine())
            for sess_func in func_files
        ]

    ### GLM: loop on (session_bold, onset_file) pairs over the various sessions
    design_matrices = []
    for func_file, onset_file in zip(func_files, onset_files):
        if isinstance(func_file, str):
            # single 4D image: take the number of volumes from its last axis
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        else:
            if len(func_file) == 1:
                func_file = func_file[0]
                bold = nibabel.load(func_file)
                assert len(bold.shape) == 4
                n_scans = bold.shape[-1]
                del bold
            else:
                n_scans = len(func_file)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        conditions, onsets, durations, amplitudes = parse_onset_file(
            onset_file)
        if time_units == "tr":
            onsets *= tr
            durations *= tr
        elif time_units in ["milliseconds"]:
            onsets *= 1e-3
            durations *= 1e-3
        paradigm = BlockParadigm(con_id=conditions,
                                 onset=onsets,
                                 duration=durations,
                                 amplitude=amplitudes)
        design_matrices.append(
            make_dmtx(frametimes,
                      paradigm,
                      hrf_model=hrf_model,
                      drift_model=drift_model,
                      hfcut=hfcut))

    # specify contrasts
    n_columns = len(design_matrices[0].names)
    contrasts = {}
    for i in range(paradigm.n_conditions):
        contrasts['%s' %
                  design_matrices[0].names[2 * i]] = np.eye(n_columns)[2 * i]

    # effects of interest F-test
    diff_contrasts = []
    for i in range(paradigm.n_conditions - 1):
        a = contrasts[design_matrices[0].names[2 * i]]
        b = contrasts[design_matrices[0].names[2 * (i + 1)]]
        diff_contrasts.append(a - b)
    contrasts["diff"] = diff_contrasts

    # fit GLM
    print('Fitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel([
        nibabel.concat_images(sess_func, check_affines=False)
        for sess_func in func_files
    ], [design_matrix.matrix for design_matrix in design_matrices],
                               mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(output_dir, "mask.nii.gz")

    print("Saving mask image %s" % mask_path)
    nibabel.save(fmri_glm.mask, mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.items():
        print("\tcontrast id: %s" % contrast_id)
        if np.ndim(contrast_val) > 1:
            contrast_type = "F"  # 2D contrast (several rows): F-test
        else:
            contrast_type = "t"  # 1D contrast vector: t-test
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * len(func_files),
            con_id=contrast_id,
            contrast_type=contrast_type,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True)

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                     [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(map_dir, '%s.nii.gz' % contrast_id)
            print("\t\tWriting %s ..." % map_path)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask
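
parse_onset_file is not shown on this page. A plausible sketch, assuming a
whitespace-separated four-column file (condition, onset, duration, amplitude)
and array outputs, since the caller rescales onsets and durations in place:

import numpy as np

def parse_onset_file(onset_file):
    # hypothetical reader: one trial per row, four whitespace-separated
    # columns -- condition id, onset, duration, amplitude
    conditions, onsets, durations, amplitudes = [], [], [], []
    with open(onset_file) as fd:
        for line in fd:
            if not line.strip():
                continue
            cond, onset, duration, amplitude = line.split()[:4]
            conditions.append(cond)
            onsets.append(float(onset))
            durations.append(float(duration))
            amplitudes.append(float(amplitude))
    return (np.array(conditions), np.array(onsets),
            np.array(durations), np.array(amplitudes))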
Example #12
def do_subject_glm(subject_data):
    """FE analysis for a single subject."""
    subject_id = subject_data['subject_id']
    output_dir = subject_data["output_dir"]
    func_files = subject_data['func']
    anat = subject_data['anat']
    onset_files = subject_data['onset']
    # subject_id = os.path.basename(subject_dir)
    # subject_output_dir = os.path.join(output_dir, subject_id)
    mem = Memory(os.path.join(output_dir, "cache"))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # glob files: anat, session func files, session onset files
    # anat = glob.glob(os.path.join(subject_dir, anat_wildcard))
    # assert len(anat) == 1
    # anat = anat[0]
    # onset_files = sorted([glob.glob(os.path.join(subject_dir, session))[0]
    #                       for session in session_onset_wildcards])
    # func_files = sorted([sorted(glob.glob(os.path.join(subject_dir, session)))
    #                      for session in session_func_wildcards])

    ### Preprocess data #######################################################
    if 0:
        subject_data = mem.cache(do_subject_preproc)(
            dict(func=func_files, anat=anat, output_dir=output_dir))
        func_files = subject_data['func']
        anat = subject_data['anat']

        # reslice func images
        func_files = [mem.cache(reslice_vols)(
            sess_func,
            target_affine=nibabel.load(sess_func[0]).get_affine())
                      for sess_func in func_files]

    ### GLM: loop on (session_bold, onset_file) pairs over the various sessions
    design_matrices = []
    for session, (func_file, onset_file) in enumerate(zip(func_files,
                                                          onset_files)):
        if isinstance(func_file, str):
            # single 4D image: take the number of volumes from its last axis
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        else:
            if len(func_file) == 1:
                func_file = func_file[0]
                bold = nibabel.load(func_file)
                assert len(bold.shape) == 4
                n_scans = bold.shape[-1]
                del bold
            else:
                n_scans = len(func_file)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        conditions, onsets, durations, amplitudes = parse_onset_file(
            onset_file)
        onsets *= tr
        durations *= tr
        paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                                 duration=durations, amplitude=amplitudes)
        design_matrices.append(make_dmtx(frametimes,
                                         paradigm, hrf_model=hrf_model,
                                         drift_model=drift_model,
                                         hfcut=hfcut))

    # specify contrasts
    n_columns = len(design_matrices[0].names)
    contrasts = {}
    for i in xrange(paradigm.n_conditions):
        contrasts['%s' % design_matrices[0].names[2 * i]
                  ] = np.eye(n_columns)[2 * i]

    # more interesting contrasts
    contrasts['faces-scrambled'] = contrasts['faces'
                                             ] - contrasts['scrambled']
    contrasts['scrambled-faces'] = -contrasts['faces-scrambled']
    contrasts['effects_of_interest'] = contrasts['faces'
                                                 ] + contrasts['scrambled']

    # effects of interest F-test
    diff_contrasts = []
    for i in xrange(paradigm.n_conditions - 1):
        a = contrasts[design_matrices[0].names[2 * i]]
        b = contrasts[design_matrices[0].names[2 * (i + 1)]]
        diff_contrasts.append(a - b)
    contrasts["diff"] = diff_contrasts

    # fit GLM
    print 'Fitting a GLM (this takes time)...'
    fmri_glm = FMRILinearModel([nibabel.concat_images(sess_func,
                                                      check_affines=False)
                                for sess_func in func_files],
                               [design_matrix.matrix
                                for design_matrix in design_matrices],
                               mask='compute'
                               )
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(output_dir, "mask.nii.gz")

    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        if np.ndim(contrast_val) > 1:
            contrast_type = "F"  # 2D contrast (several rows): F-test
        else:
            contrast_type = "t"  # 1D contrast vector: t-test
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * 2,
            con_id=contrast_id,
            contrast_type=contrast_type,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True
            )

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(
                output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask
Example #13
def load_glm_params(data_dir, model_id,
                    subject_ids=None,
                    hrf_model='canonical',
                    drift_model='cosine',
                    motion_params=None,
                    is_event_paradigm=True):
    """Load GLM parameters

    XXX document this code!

    """

    docs = []
    study_id = os.path.split(data_dir)[-1]
    tr = float(open(os.path.join(data_dir, 'scan_key.txt')).read().split()[1])

    if subject_ids is None:
        subject_dirs = sorted(glob.glob(os.path.join(data_dir, 'sub*')))
    else:
        subject_dirs = sorted([os.path.join(data_dir, subject_id)
                               for subject_id in subject_ids])

    for subject_dir in subject_dirs:
        params = {}
        subject_id = os.path.split(subject_dir)[1]
        params['study'] = study_id
        params['subject'] = subject_id

        # subjects models
        model_dir = os.path.join(subject_dir, 'model', model_id)
        session_dirs = sorted(glob.glob(
            os.path.join(model_dir, 'onsets', '*')))
        for i, session_dir in enumerate(session_dirs):
            session_id = os.path.split(session_dir)[1]
            img = nb.load(os.path.join(data_dir, subject_id, 'BOLD',
                                       session_id, 'bold.nii.gz'))
            n_scans = img.shape[-1]
            params.setdefault('sessions', []).append(session_id)
            params.setdefault('n_scans', []).append(n_scans)

            onsets = None
            cond_vect = None
            cond_files = sorted(glob.glob(os.path.join(session_dir, 'cond*')))
            for cond_file in cond_files:
                cond_id = int(re.findall('cond(.*).txt',
                                         os.path.split(cond_file)[1])[0])

                # load paradigm
                # replace whitespace characters by spaces before parsing
                cond = open(cond_file, 'rb').read()
                iterable = [
                    row.strip()
                    for row in re.sub('[\t\r\f\v]', ' ', cond).split('\n')]
                reader = csv.reader(iterable, delimiter=' ')
                rows = list(reader)
                if onsets is None:
                    onsets = [float(row[0]) for row in rows if row != []]
                    cond_vect = [cond_id] * len(rows)
                else:
                    onsets += [float(row[0]) for row in rows if row != []]
                    cond_vect += [cond_id] * len(rows)

            onsets = np.array(onsets)
            cond_vect = np.array(cond_vect)
            order = np.argsort(onsets)
            if is_event_paradigm:
                paradigm = EventRelatedParadigm(
                    cond_vect[order], onsets[order])
            else:
                paradigm = BlockParadigm(cond_vect[order], onsets[order])
            frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
            if motion_params is None or subject_id not in motion_params:
                design_matrix = make_dmtx(
                    frametimes, paradigm, hrf_model=hrf_model,
                    drift_model=drift_model)
            else:
                add_regs = motion_params[subject_id][i]
                add_reg_names = ['motion_%i' % r
                                 for r in range(add_regs.shape[1])]
                design_matrix = make_dmtx(
                    frametimes, paradigm, hrf_model=hrf_model,
                    drift_model=drift_model,
                    add_regs=add_regs, add_reg_names=add_reg_names)

                # plot and save dmat
                ax = design_matrix.show()
                ax.set_position([.05, .25, .9, .65])
                ax.set_title('Design matrix (%s, %s)' % (subject_id,
                                                         session_id))
                dmat_outfile = os.path.join(session_dir, 'design_matrix.png')
                print "Saving design matrix %s" % dmat_outfile
                pl.savefig(dmat_outfile)

            params.setdefault('design_matrices', []).append(
                design_matrix.matrix)

        docs.append(params)

    # study model
    n_regressors = [dm.shape[-1] for dm in params['design_matrices']]
    model_dir = os.path.join(data_dir, 'models', model_id)

    # replace whitespace characters by spaces before parsing
    con = open(os.path.join(model_dir, 'task_contrasts.txt'), 'rb').read()
    iterable = [row.strip()
                for row in re.sub('[\t\r\f\v]', ' ', con).split('\n')]
    reader = csv.reader(iterable, delimiter=' ')
    # reader = csv.reader(contrasts_file, delimiter=' ')

    contrasts = {}
    shift = 0
    for row in reader:
        if row != []:
            for n_regressor in n_regressors:
                if row[0].startswith('task'):  # file format varies
                    shift = 1
                contrast_id = row[0 + shift]
                # append zeros to contrasts for the confounds
                contrast = np.hstack(
                    [np.array(row[1 + shift:]).astype('float'),
                     np.zeros(n_regressor - len(row[1 + shift:]))])

                contrasts.setdefault(contrast_id, []).append(contrast)

    for doc in docs:
        doc['contrasts'] = contrasts

    return docs
Example #14
def execute_spm_auditory_glm(data, reg_motion=False):
    reg_motion = reg_motion and 'realignment_parameters' in data

    tr = 7.
    n_scans = 96
    _duration = 6
    epoch_duration = _duration * tr
    conditions = ['rest', 'active'] * 8
    duration = epoch_duration * np.ones(len(conditions))
    onset = np.linspace(0, (len(conditions) - 1) * epoch_duration,
                        len(conditions))
    paradigm = BlockParadigm(con_id=conditions, onset=onset, duration=duration)
    hfcut = 2 * 2 * epoch_duration

    # construct design matrix
    frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    drift_model = 'Cosine'
    hrf_model = 'Canonical With Derivative'

    add_reg_names = None
    add_regs = None
    if reg_motion:
        add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']
        add_regs = data['realignment_parameters'][0]
        if isinstance(add_regs, basestring):
            add_regs = np.loadtxt(add_regs)

    design_matrix = make_dmtx(frametimes,
                              paradigm, hrf_model=hrf_model,
                              drift_model=drift_model, hfcut=hfcut,
                              add_reg_names=add_reg_names,
                              add_regs=add_regs)

    # plot and save design matrix
    ax = design_matrix.show()
    ax.set_position([.05, .25, .9, .65])
    ax.set_title('Design matrix')
    dmat_outfile = os.path.join(data['output_dir'],
                                'design_matrix.png')
    pl.savefig(dmat_outfile, bbox_inches="tight", dpi=200)

    # specify contrasts
    contrasts = {}
    n_columns = len(design_matrix.names)
    for i in xrange(paradigm.n_conditions):
        contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

    # more interesting contrasts"""
    contrasts['active-rest'] = contrasts['active'] - contrasts['rest']

    # fit GLM
    print('\r\nFitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel(load_4D_img(data['func'][0]),
                               design_matrix.matrix,
                               mask='compute')

    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(data['output_dir'], "mask.nii.gz")
    print "Saving mask image %s..." % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute background onto which activation will be projected
    anat_img = load_vol(data['anat'])

    anat = anat_img.get_data()

    if anat.ndim == 4:
        anat = anat[..., 0]

    anat_affine = anat_img.get_affine()

    print "Computing contrasts..."
    z_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, eff_map, var_map = fmri_glm.contrast(
            contrasts[contrast_id],
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True,
            )

        # store stat maps to disk
        for dtype, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, eff_map, var_map]):
            map_dir = os.path.join(
                data['output_dir'], '%s_maps' % dtype)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if contrast_id == 'active-rest' and dtype == "z":
                z_maps[contrast_id] = map_path

            print "\t\t%s map: %s" % (dtype, map_path)

        print

    # do stats report
    stats_report_filename = os.path.join(data['reports_output_dir'],
                                         "report_stats.html")
    contrasts = dict((contrast_id, contrasts[contrast_id])
                     for contrast_id in z_maps.keys())
    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        design_matrices=[design_matrix],
        subject_id=data['subject_id'],
        anat=anat,
        anat_affine=anat_affine,
        cluster_th=50,  # we're only interested in these 'large' clusters

        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
        )

    ProgressReport().finish_dir(data['output_dir'])

    print "\r\nStatistic report written to %s\r\n" % stats_report_filename
Example #15
n_scans = 128
tr = 2.4
# paradigm
frametimes = np.linspace(0.5 * tr, (n_scans - .5) * tr, n_scans)

fmri_data = nibabel.load('s12069_swaloc1_corr.nii.gz')

########################################
# Design matrix
########################################

paradigm = load_paradigm_from_csv_file('localizer_paradigm.csv')['0']

design_matrix = make_dmtx(frametimes,
                          paradigm,
                          hrf_model='canonical with derivative',
                          drift_model="cosine",
                          hfcut=128)

#########################################
# Specify the contrasts
#########################################

# simplest ones
contrasts = {}
n_columns = len(design_matrix.names)
for i in range(paradigm.n_conditions):
    contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

# Our contrast of interest
reading_vs_visual = contrasts["phrasevideo"] - contrasts["damier_H"]
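
A plausible continuation, following the FMRILinearModel pattern used in the
other examples on this page (the regressor names, and hence the contrast
built above, depend on the paradigm CSV):

from nipy.modalities.fmri.glm import FMRILinearModel

fmri_glm = FMRILinearModel(fmri_data, design_matrix.matrix, mask='compute')
fmri_glm.fit(do_scaling=True, model='ar1')

# z- and effect-size maps for the contrast of interest
z_map, eff_map = fmri_glm.contrast(reading_vs_visual,
                                   con_id='reading_vs_visual',
                                   output_z=True, output_effects=True)
nibabel.save(z_map, 'reading_vs_visual_z_map.nii.gz')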
Example #17
File: glm.py  Project: Solvi/pyhrf
    mask_array = load(mask_file).get_data()
    m = np.where(mask_array > 0)
    pyhrf.verbose(1, 'Mask: shape=%s, nb vox in mask=%d'
                  %(str(mask_array.shape), mask_array.sum()))

    # BOLD data
    fmri_image = load(bold_file)
    #pyhrf.verbose(1, 'BOLD shape : %s' %str(fmri_image.get_shape()))
    Y = fmri_image.get_data()[m[0], m[1], m[2], :]
    n_scans = Y.shape[1]
    pyhrf.verbose(1, 'Loaded BOLD: nvox=%d, nscans=%d' %Y.shape)

    # Design matrix
    frametimes = np.linspace(0, (n_scans-1)*tr, n_scans)
    design_matrix = dm.make_dmtx(frametimes, paradigm,
                                 hrf_model=hrf_model,
                                 drift_model=drift_model, hfcut=hfcut,
                                 fir_delays=fir_delays)
    ns, nr = design_matrix.matrix.shape
    pyhrf.verbose(2, 'Design matrix built with %d regressors:' %nr)
    for rn in design_matrix.names:
        pyhrf.verbose(2, '    - %s' %rn)


    cdesign_matrix = xndarray(design_matrix.matrix,
                              axes_names=['time', 'regressor'],
                              axes_domains={'time': np.arange(ns) * tr,
                                            'regressor': design_matrix.names})

    cdesign_matrix.save(op.join(output_dir, 'design_matrix.nii'))
    import cPickle
    f = open(op.join(output_dir, 'design_matrix.pck'), 'w')
Example #18
def execute_spm_multimodal_fmri_glm(data, reg_motion=False):
    reg_motion = reg_motion and 'realignment_parameters' in data

    # experimental paradigm meta-params
    stats_start_time = time.ctime()
    tr = 2.
    drift_model = 'Cosine'
    hrf_model = 'Canonical With Derivative'
    hfcut = 128.

    # make design matrices
    design_matrices = []
    for x in xrange(2):
        n_scans = data['func'][x].shape[-1]

        timing = scipy.io.loadmat(data['trials_ses%i' % (x + 1)],
                                  squeeze_me=True, struct_as_record=False)

        faces_onsets = timing['onsets'][0].ravel()
        scrambled_onsets = timing['onsets'][1].ravel()
        onsets = np.hstack((faces_onsets, scrambled_onsets))
        onsets *= tr  # because onsets were reported in 'scans' units
        conditions = ['faces'] * len(faces_onsets) + ['scrambled'] * len(
            scrambled_onsets)
        paradigm = EventRelatedParadigm(conditions, onsets)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)

        add_reg_names = None
        add_regs = None
        if reg_motion:
            add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']
            add_regs = data['realignment_parameters'][x]
            if isinstance(add_regs, basestring):
                add_regs = np.loadtxt(add_regs)
        design_matrix = make_dmtx(
            frametimes,
            paradigm, hrf_model=hrf_model,
            drift_model=drift_model, hfcut=hfcut,
            add_reg_names=add_reg_names,
            add_regs=add_regs
            )

        design_matrices.append(design_matrix)

    # specify contrasts
    contrasts = {}
    n_columns = len(design_matrix.names)
    for i in xrange(paradigm.n_conditions):
        contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

    # more interesting contrasts
    contrasts['faces-scrambled'] = contrasts['faces'] - contrasts['scrambled']
    contrasts['scrambled-faces'] = contrasts['scrambled'] - contrasts['faces']
    contrasts['effects_of_interest'] = contrasts[
        'faces'] + contrasts['scrambled']

    # we have the same contrasts over sessions, so let's replicate
    contrasts = dict((contrast_id, [contrast_val] * 2)
                     for contrast_id, contrast_val in contrasts.iteritems())

    # fit GLM
    print('\r\nFitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel([load_4D_img(sess_func)
                                for sess_func in data['func']],
                               [dmat.matrix for dmat in design_matrices],
                               mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(data['output_dir'], "mask.nii.gz")
    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute background onto which activation will be projected
    anat_img = load_vol(data['anat'])

    anat = anat_img.get_data()

    if anat.ndim == 4:
        anat = anat[..., 0]

    anat_affine = anat_img.get_affine()

    print "Computing contrasts .."
    z_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, eff_map, var_map = fmri_glm.contrast(
            contrast_val,
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True,
            )

        # store stat maps to disk
        for dtype, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, eff_map, var_map]):
            map_dir = os.path.join(
                data['output_dir'], '%s_maps' % dtype)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if dtype == 'z':
                z_maps[contrast_id] = map_path

            print "\t\t%s map: %s" % (dtype, map_path)

    # do stats report
    data['stats_report_filename'] = os.path.join(data['reports_output_dir'],
                                                 "report_stats.html")
    contrasts = dict((contrast_id, contrasts[contrast_id])
                     for contrast_id in z_maps.keys())
    generate_subject_stats_report(
        data['stats_report_filename'],
        contrasts,
        z_maps,
        fmri_glm.mask,
        anat=anat,
        anat_affine=anat_affine,
        design_matrices=design_matrices,
        subject_id=data['subject_id'],
        cluster_th=15,  # we're only interested in these 'large' clusters
        start_time=stats_start_time,

        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
        )

    ProgressReport().finish_dir(data['reports_output_dir'])

    print "\r\nStatistic report written to %s\r\n" % data[
        'stats_report_filename']

    return data
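
# Hypothetical call sketch for the example above (the file names below are
# illustrative; the ``data`` keys mirror those the function body reads, with
# 'func' holding already-loaded 4D images since their .shape is inspected;
# the function name follows the identical full definition in Exemplo n.º 27):
data = dict(func=[nibabel.load('sess1_bold.nii'),
                  nibabel.load('sess2_bold.nii')],
            anat='anat.nii',
            trials_ses1='trials_ses1.mat', trials_ses2='trials_ses2.mat',
            realignment_parameters=['rp_sess1.txt', 'rp_sess2.txt'],
            subject_id='sub001', output_dir='glm_results',
            reports_output_dir='glm_results/reports')
data = execute_spm_multimodal_fmri_glm(data, reg_motion=True)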
Exemplo n.º 19
0
def execute_spm_auditory_glm(data, reg_motion=False):
    reg_motion = reg_motion and 'realignment_parameters' in data

    tr = 7.
    n_scans = 96
    _duration = 6
    epoch_duration = _duration * tr
    conditions = ['rest', 'active'] * 8
    duration = epoch_duration * np.ones(len(conditions))
    onset = np.linspace(0, (len(conditions) - 1) * epoch_duration,
                        len(conditions))
    paradigm = BlockParadigm(con_id=conditions, onset=onset, duration=duration)
    hfcut = 2 * 2 * epoch_duration
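    # (hfcut is the high-pass cutoff period in seconds: the cosine drift
    # model absorbs fluctuations slower than this; 4 * epoch_duration keeps
    # it well above the rest/active alternation period of 2 * epoch_duration,
    # so task-related signal is not removed as drift)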

    # construct design matrix
    frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    drift_model = 'Cosine'
    hrf_model = 'Canonical With Derivative'

    add_reg_names = None
    add_regs = None
    if reg_motion:
        add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']
        add_regs = data['realignment_parameters'][0]
        if isinstance(add_regs, basestring):
            add_regs = np.loadtxt(add_regs)

    design_matrix = make_dmtx(frametimes,
                              paradigm, hrf_model=hrf_model,
                              drift_model=drift_model, hfcut=hfcut,
                              add_reg_names=add_reg_names,
                              add_regs=add_regs)

    # plot and save design matrix
    ax = design_matrix.show()
    ax.set_position([.05, .25, .9, .65])
    ax.set_title('Design matrix')
    dmat_outfile = os.path.join(data['output_dir'],
                                'design_matrix.png')
    pl.savefig(dmat_outfile, bbox_inches="tight", dpi=200)
    pl.close()

    # specify contrasts
    contrasts = {}
    n_columns = len(design_matrix.names)
    for i in xrange(paradigm.n_conditions):
        contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

    # more interesting contrasts"""
    contrasts['active-rest'] = contrasts['active'] - contrasts['rest']

    # fit GLM
    print('\r\nFitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel(load_4D_img(data['func'][0]),
                               design_matrix.matrix,
                               mask='compute')

    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(data['output_dir'], "mask.nii.gz")
    print "Saving mask image %s..." % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute bg unto which activation will be projected
    anat_img = load_vol(data['anat'])

    anat = anat_img.get_data()

    if anat.ndim == 4:
        anat = anat[..., 0]

    anat_affine = anat_img.get_affine()

    print "Computing contrasts..."
    z_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, eff_map, var_map = fmri_glm.contrast(
            contrasts[contrast_id],
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True,
            )

        # store stat maps to disk
        for dtype, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, eff_map, var_map]):
            map_dir = os.path.join(
                data['output_dir'], '%s_maps' % dtype)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if contrast_id == 'active-rest' and dtype == "z":
                z_maps[contrast_id] = map_path

            print "\t\t%s map: %s" % (dtype, map_path)

        print

    # do stats report
    stats_report_filename = os.path.join(data['reports_output_dir'],
                                         "report_stats.html")
    contrasts = dict((contrast_id, contrasts[contrast_id])
                     for contrast_id in z_maps.keys())
    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        design_matrices=[design_matrix],
        subject_id=data['subject_id'],
        anat=anat,
        anat_affine=anat_affine,
        cluster_th=50,  # we're only interested in 'large' clusters

        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
        )

    ProgressReport().finish_dir(data['output_dir'])

    print "\r\nStatistic report written to %s\r\n" % stats_report_filename
Exemplo n.º 20
0
def do_subject_glm(subject_data):
    """FE analysis for a single subject."""
    subject_id = subject_data['subject_id']
    output_dir = subject_data["output_dir"]
    func_files = subject_data['func']
    anat = subject_data['anat']
    onset_files = subject_data['onset']
    tr = subject_data['TR']
    time_units = subject_data['time_units'].lower()
    assert time_units in ["seconds", "tr", "milliseconds"]
    drift_model = subject_data['drift_model']
    hrf_model = subject_data["hrf_model"]
    hfcut = subject_data["hfcut"]
    mem = Memory(os.path.join(output_dir, "cache"))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if 0:  # deactivated: optional preprocessing and reslicing of the images
        subject_data = mem.cache(do_subject_preproc)(
            dict(func=func_files, anat=anat, output_dir=output_dir))
        func_files = subject_data['func']
        anat = subject_data['anat']

        # reslice func images
        func_files = [mem.cache(reslice_vols)(
            sess_func, target_affine=nibabel.load(sess_func[0]).get_affine())
                      for sess_func in func_files]

    ### GLM: loop on (session_bold, onset_file) pairs over the various sessions
    design_matrices = []
    for func_file, onset_file in zip(func_files, onset_files):
        if isinstance(func_file, str):
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        else:
            if len(func_file) == 1:
                func_file = func_file[0]
                bold = nibabel.load(func_file)
                assert len(bold.shape) == 4
                n_scans = bold.shape[-1]
                del bold
            else:
                n_scans = len(func_file)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        conditions, onsets, durations, amplitudes = parse_onset_file(
            onset_file)
        if time_units == "tr":
            onsets *= tr
            durations *= tr
        elif time_units in ["milliseconds"]:
            onsets *= 1e-3
            durations *= 1e-3
        paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                                 duration=durations, amplitude=amplitudes)
        design_matrices.append(make_dmtx(
                frametimes,
                paradigm, hrf_model=hrf_model,
                drift_model=drift_model, hfcut=hfcut))

    # specify contrasts
    n_columns = len(design_matrices[0].names)
    contrasts = {}
    for i in range(paradigm.n_conditions):
        cond_name = design_matrices[0].names[2 * i]
        contrasts[cond_name] = np.eye(n_columns)[2 * i]

    # effects of interest F-test
    diff_contrasts = []
    for i in range(paradigm.n_conditions - 1):
        a = contrasts[design_matrices[0].names[2 * i]]
        b = contrasts[design_matrices[0].names[2 * (i + 1)]]
        diff_contrasts.append(a - b)
    contrasts["diff"] = diff_contrasts

    # fit GLM
    print('Fitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel([nibabel.concat_images(sess_func,
                                                      check_affines=False)
                                for sess_func in func_files],
                               [design_matrix.matrix
                                for design_matrix in design_matrices],
                               mask='compute'
                               )
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(output_dir, "mask.nii.gz")

    print("Saving mask image %s" % mask_path)
    nibabel.save(fmri_glm.mask, mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.items():
        print("\tcontrast id: %s" % contrast_id)
        if np.ndim(contrast_val) > 1:
            contrast_type = "F"
        else:
            contrast_type = "t"
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * len(func_files),
            con_id=contrast_id,
            contrast_type=contrast_type,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True
            )

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                     [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(
                output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            print("\t\tWriting %s ..." % map_path)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask
Exemplo n.º 21
0
# timing
n_scans = 128
tr = 2.4
# paradigm
frametimes = np.linspace(0.5 * tr, (n_scans - .5) * tr, n_scans)

fmri_data = nibabel.load('s12069_swaloc1_corr.nii.gz')

########################################
# Design matrix
########################################

paradigm = load_paradigm_from_csv_file('localizer_paradigm.csv')['0']

design_matrix = make_dmtx(frametimes, paradigm,
                          hrf_model='canonical with derivative',
                          drift_model="cosine", hfcut=128)

#########################################
# Specify the contrasts
#########################################

# simplest ones
contrasts = {}
n_columns = len(design_matrix.names)
for i in range(paradigm.n_conditions):
    contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

# Our contrast of interest
reading_vs_visual = contrasts["phrasevideo"] - contrasts["damier_H"]
Exemplo n.º 22
0
                              squeeze_me=True, struct_as_record=False)

    faces_onsets = timing['onsets'][0].ravel()
    scrambled_onsets = timing['onsets'][1].ravel()
    onsets = np.hstack((faces_onsets, scrambled_onsets))
    onsets *= tr  # because onsets were reported in 'scans' units
    conditions = ['faces'] * len(faces_onsets) + ['scrambled'] * len(
        scrambled_onsets)
    paradigm = EventRelatedParadigm(conditions, onsets)

    # build design matrix
    frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    design_matrix = make_dmtx(
        frametimes,
        paradigm, hrf_model=hrf_model,
        drift_model=drift_model, hfcut=hfcut,
        add_reg_names=['tx', 'ty', 'tz', 'rx', 'ry', 'rz'],
        add_regs=np.loadtxt(subject_data.realignment_parameters[x])
        )

    # specify contrasts
    contrasts = {}
    n_columns = len(design_matrix.names)
    for i in xrange(paradigm.n_conditions):
        cond_name = design_matrix.names[2 * i]
        contrasts[cond_name] = np.eye(n_columns)[2 * i]

    # more interesting contrasts
    contrasts['faces-scrambled'] = contrasts['faces'] - contrasts['scrambled']
    contrasts['scrambled-faces'] = -contrasts['faces-scrambled']
Exemplo n.º 23
0
def _fit_hrf_event_model(ds,
                         events,
                         time_attr,
                         condition_attr='targets',
                         design_kwargs=None,
                         glmfit_kwargs=None,
                         regr_attrs=None):
    if externals.exists('nipy', raise_=True):
        from nipy.modalities.fmri.design_matrix import make_dmtx
        from mvpa2.mappers.glm import NiPyGLMMapper

    # Decide/derive condition attribute on which GLM will actually be done
    if isinstance(condition_attr, basestring):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    glm_condition_attr = 'regressor_names'  # actual regressors
    # map from compound regressor labels back to the original conditions
    glm_condition_attr_map = dict([(con, dict()) for con in condition_attr])
    events = copy.deepcopy(events)  # since we are modifying in place
    for event in events:
        if glm_condition_attr in event:
            raise ValueError("Event %s already has %s defined.  Should not "
                             "happen.  Choose another name if defined it" %
                             (event, glm_condition_attr))
        compound_label = event[glm_condition_attr] = \
            'glm_label_' + '+'.join(
                str(event[con]) for con in condition_attr)
        # and mapping back to original values, without str()
        # for each condition:
        for con in condition_attr:
            glm_condition_attr_map[con][compound_label] = event[con]

    evvars = _events2dict(events)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    # create paradigm
    if 'duration' in evvars:
        from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
        # NiPy considers everything with a duration as a block paradigm
        paradigm = BlockParadigm(con_id=evvars[glm_condition_attr],
                                 onset=evvars['onset'],
                                 duration=evvars['duration'],
                                 **add_paradigm_kwargs)
    else:
        from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
        paradigm = EventRelatedParadigm(con_id=evvars[glm_condition_attr],
                                        onset=evvars['onset'],
                                        **add_paradigm_kwargs)
    # create design matrix -- all kinds of fancy additional regr can be
    # auto-generated
    if design_kwargs is None:
        design_kwargs = {}
    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            names.append(attr)
            regrs.append(ds.sa[attr].value)
        # stack the 1-D regressors into an (n_samples, n_regressors) array
        regrs = np.vstack(regrs).T
        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack(
                (design_kwargs['add_regs'], regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names
    design_matrix = make_dmtx(ds.sa[time_attr].value, paradigm,
                              **design_kwargs)

    # push design into source dataset
    glm_regs = [(reg, design_matrix.matrix[:, i])
                for i, reg in enumerate(design_matrix.names)]

    # GLM
    glm = NiPyGLMMapper([],
                        glmfit_kwargs=glmfit_kwargs,
                        add_regs=glm_regs,
                        return_design=True,
                        return_model=True,
                        space=glm_condition_attr)

    model_params = glm(ds)

    # some regressors might be corresponding not to original condition_attr
    # so let's separate them out
    regressor_names = model_params.sa[glm_condition_attr].value
    condition_regressors = np.array(
        [v in glm_condition_attr_map.values()[0] for v in regressor_names])
    assert (condition_regressors.dtype == np.bool)
    if not np.all(condition_regressors):
        # some regressors do not correspond to conditions and would need
        # to be taken into a separate dataset
        model_params.a['add_regs'] = model_params[~condition_regressors]
        # then we process the rest
        model_params = model_params[condition_regressors]
        regressor_names = model_params.sa[glm_condition_attr].value

    # now define proper condition sa's
    for con, con_map in glm_condition_attr_map.iteritems():
        model_params.sa[con] = [con_map[v] for v in regressor_names]
    model_params.sa.pop(glm_condition_attr)  # remove generated one
    return model_params
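
# Illustrative note (hypothetical values): with
# condition_attr=['targets', 'chunks'], an event with targets='face' and
# chunks=2 gets the compound regressor name 'glm_label_face+2';
# glm_condition_attr_map then maps that regressor back to targets='face'
# and chunks=2 when the condition attributes are restored above.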
Exemplo n.º 24
0
from numpy import array
from nipy.modalities.fmri.design_matrix import make_dmtx
from nipy.modalities.fmri.experimental_paradigm import EventRelatedParadigm
# Specify an event-related paradigm
conditions = ['smile', 'smile', 'listen', 'listen', 'read', 'read']
onsets = [30, 70, 10, 90, 40, 60] # onset time in s.
paradigm = EventRelatedParadigm(conditions, onsets)
# provide frametimes and compute the design matrix
frametimes = array(range(0, 100)) # time of the scans
X = make_dmtx(frametimes, paradigm, drift_model='polynomial', drift_order=3)
X.show()
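
# Variant sketch under the same nipy API (the durations below are
# illustrative assumptions): give the events durations and NiPy treats the
# paradigm as a block design, via BlockParadigm.
from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
durations = [5, 5, 10, 10, 2, 2]  # hypothetical block lengths in s.
block_paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                               duration=durations)
X_block = make_dmtx(frametimes, block_paradigm,
                    drift_model='polynomial', drift_order=3)
X_block.show()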
Exemplo n.º 25
0
def fit_event_hrf_model(
        ds, events, time_attr, condition_attr='targets', design_kwargs=None,
        glmfit_kwargs=None, regr_attrs=None, return_model=False):
    """Fit a GLM with HRF regressor and yield a dataset with model parameters

    A univariate GLM is fitted for each feature and model parameters are
    returned as samples. Model parameters are returned for each regressor in
    the design matrix. Using functionality from NiPy, design matrices can be
    generated from event definitions with a variety of customizations (HRF
    model, confound regressors, ...).

    Events need to be specified as a list of dictionaries
    (see :class:`~mvpa2.misc.support.Event` for a helper class). Each dictionary
    contains all relevant attributes to describe an event.

    HRF event model details
    -----------------------

    The event specifications are used to generate a design matrix for all
    present conditions. In addition to the mandatory ``onset`` information
    each event definition needs to include a label in order to associate
    individual events to conditions (the design matrix will contain at least
    one regressor for each condition). The name of this label attribute must
    be specified too (see ``condition_attr`` argument).

    NiPy is used to generate the actual design matrix.  It is required to
    specify a dataset sample attribute that contains time stamps for all input
    data samples (see ``time_attr``).  NiPy operation could be customized (see
    ``design_kwargs`` argument). Additional regressors from sample attributes
    of the input dataset can be included in the design matrix (see
    ``regr_attrs``).

    The actual GLM fit is also performed by NiPy and can be fully customized
    (see ``glmfit_kwargs``).

    Parameters
    ----------
    ds : Dataset
      The samples of this input dataset have to be in ascending temporal order.
    events : list
      Each event definition has to specify ``onset`` and ``duration``. All
      other attributes will be passed on to the sample attributes collection of
      the returned dataset.
    time_attr : str
      Attribute with dataset sample time stamps.
      Its values are treated as being in the same unit as the event onset
      and duration definitions, and are used to determine the corresponding
      samples. For HRF modeling this argument is mandatory.
    condition_attr : str
      Name of the event attribute with the condition labels.
      Can be a list of those (e.g. ``['targets', 'chunks']``), a combination
      of which would constitute a condition.
    design_kwargs : dict
      Arbitrary keyword arguments for NiPy's make_dmtx() used for design matrix
      generation. Choose HRF model, confound regressors, etc.
    glmfit_kwargs : dict
      Arbitrary keyword arguments for NiPy's GeneralLinearModel.fit() used for
      estimating model parameters. Choose the fitting algorithm: OLS or AR1.
    regr_attrs : list
      List of dataset sample attribute names that shall be extracted from the
      input dataset and used as additional regressors in the design matrix.
    return_model : bool
      Flag whether to include the fitted GLM model in the returned dataset.
      For large input data this can be problematic, as the model may contain
      the residuals (same size as the input data), hence multiplying the
      memory demand. Off by default.

    Returns
    -------
    Dataset
      One sample for each regressor/condition in the design matrix is returned.
      The condition names are included as a sample attribute with the name
      specified by the ``condition_attr`` argument.  The actual design
      regressors are included as ``regressors`` sample attribute. If enabled,
      an instance with the fitted NiPy GLM results is included as a dataset
      attribute ``model``, and can be used for computing contrasts subsequently.
    """
    if externals.exists('nipy', raise_=True):
        from nipy.modalities.fmri.design_matrix import make_dmtx
        from mvpa2.mappers.glm import NiPyGLMMapper

    # Decide/derive condition attribute on which GLM will actually be done
    if isinstance(condition_attr, basestring):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    glm_condition_attr = 'regressor_names'  # actual regressors
    # map from compound regressor labels back to the original conditions
    glm_condition_attr_map = dict([(con, dict()) for con in condition_attr])
    events = copy.deepcopy(events)  # since we are modifying in place
    for event in events:
        if glm_condition_attr in event:
            raise ValueError("Event %s already has %s defined.  Should not "
                             "happen.  Choose another name if defined it"
                             % (event, glm_condition_attr))
        compound_label = event[glm_condition_attr] = \
            'glm_label_' + '+'.join(
                str(event[con]) for con in condition_attr)
        # and mapping back to original values, without str()
        # for each condition:
        for con in condition_attr:
            glm_condition_attr_map[con][compound_label] = event[con]

    evvars = _events2dict(events)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    # create paradigm
    if 'duration' in evvars:
        from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
        # NiPy considers everything with a duration as a block paradigm
        paradigm = BlockParadigm(
                        con_id=evvars[glm_condition_attr],
                        onset=evvars['onset'],
                        duration=evvars['duration'],
                        **add_paradigm_kwargs)
    else:
        from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
        paradigm = EventRelatedParadigm(
                        con_id=evvars[glm_condition_attr],
                        onset=evvars['onset'],
                        **add_paradigm_kwargs)
    # create design matrix -- all kinds of fancy additional regr can be
    # auto-generated
    if design_kwargs is None:
        design_kwargs = {}

    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            regr = ds.sa[attr].value
            # add rudimentary dimension for easy hstacking later on
            if regr.ndim < 2:
                regr = regr[:, np.newaxis]
            if regr.shape[1] == 1:
                names.append(attr)
            else:
                #  add one per each column of the regressor
                for i in xrange(regr.shape[1]):
                    names.append("%s.%d" % (attr, i))
            regrs.append(regr)
        regrs = np.hstack(regrs)

        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack((design_kwargs['add_regs'],
                                                   regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names

    design_matrix = make_dmtx(ds.sa[time_attr].value,
                              paradigm,
                              **design_kwargs)

    # push design into source dataset
    glm_regs = [
        (reg, design_matrix.matrix[:, i])
        for i, reg in enumerate(design_matrix.names)]

    # GLM
    glm = NiPyGLMMapper([], glmfit_kwargs=glmfit_kwargs,
                        add_regs=glm_regs,
                        return_design=True, return_model=return_model,
                        space=glm_condition_attr)

    model_params = glm(ds)

    # some regressors might be corresponding not to original condition_attr
    # so let's separate them out
    regressor_names = model_params.sa[glm_condition_attr].value
    condition_regressors = np.array([v in glm_condition_attr_map.values()[0]
                                     for v in regressor_names])
    assert(condition_regressors.dtype == np.bool)
    if not np.all(condition_regressors):
        # some regressors do not correspond to conditions and would need
        # to be taken into a separate dataset
        model_params.a['add_regs'] = model_params[~condition_regressors]
        # then we process the rest
        model_params = model_params[condition_regressors]
        regressor_names = model_params.sa[glm_condition_attr].value

    # now define proper condition sa's
    for con, con_map in glm_condition_attr_map.iteritems():
        model_params.sa[con] = [con_map[v] for v in regressor_names]
    model_params.sa.pop(glm_condition_attr) # remove generated one
    return model_params
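
# Hypothetical usage sketch (``ds`` and ``events`` are assumed to come from
# the usual mvpa2 dataset loading / event extraction; parameter values are
# illustrative):
#     evds = fit_event_hrf_model(
#         ds, events, time_attr='time_coords', condition_attr='targets',
#         design_kwargs=dict(hrf_model='canonical', drift_model='blank'),
#         glmfit_kwargs=dict(model='ols'))
# evds then holds one sample per condition regressor, with the original
# condition labels available as evds.sa.targets.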
Exemplo n.º 26
0
def runsub(sub, thisContrast, thisContrastStr, testContrast,
           filterLen, filterOrd, write=False, debug=False,
           alphas=1, roi='grayMatter'):
    thisSub = {sub: subList[sub]}
    mc_params = lmvpa.loadmotionparams(paths, thisSub)
    beta_events = lmvpa.loadevents(paths, thisSub)
    dsdict = lmvpa.loadsubdata(paths, thisSub, m=roi, c='trial_type')
    thisDS = dsdict[sub]

    # Savitzky-Golay filtering
    sg.sg_filter(thisDS, filterLen, filterOrd)
    # the Gallant group z-scores before regression.

    # zscore w.r.t. rest trials
    # zscore(thisDS, param_est=('targets', ['rest']), chunks_attr='chunks')
    # zscore entire set. if done chunk-wise, there is no double-dipping (since we leave a chunk out at a time).
    zscore(thisDS, chunks_attr='chunks')

    # kay method: leave out a model run, use it to fit an HRF for each voxel
    # huth method: essentially use FIR
    # mumford method: deconvolution with canonical HRF

    # get timing data from timing files
    rds, events = lmvpa.amendtimings(thisDS.copy(), beta_events[sub], contrasts)  # adding features

    # we can model out motion and just not use those betas.
    # Ridge
    if isinstance(thisContrast, basestring):
        thisContrast = [thisContrast]
    desX, rds = lmvpa.make_designmat(rds, events, time_attr='time_coords', condition_attr=thisContrast,
                                     design_kwargs={'hrf_model': 'canonical', 'drift_model': 'blank'},
                                     regr_attrs=None)
    # 'add_regs': mc_params[sub]

    desX['motion'] = make_dmtx(rds.sa['time_coords'].value, paradigm=None, add_regs=mc_params[sub], drift_model='blank')
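
    # (passing paradigm=None with add_regs yields a nuisance-only design
    # matrix: just the motion regressors plus a constant term, since
    # drift_model='blank'; this lets motion be modeled out separately from
    # the condition regressors, and its betas simply left unused)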

    des = lmvpa.make_parammat(desX, hrf='canonical', zscore=True)

    # split by language and pictures
    lidx = thisDS.chunks < thisDS.sa['chunks'].unique[len(thisDS.sa['chunks'].unique) / 2]
    pidx = thisDS.chunks >= thisDS.sa['chunks'].unique[len(thisDS.sa['chunks'].unique) / 2]
    ldes = cp.copy(des)
    pdes = cp.copy(des)

    ldes.matrix = ldes.matrix[lidx]
    pdes.matrix = pdes.matrix[pidx]

    covarmat = None
    mus = None
    lwts, _, lres, lceil = bsr.bootstrap_ridge(ds=rds[lidx], des=ldes, chunklen=1, nchunks=1,
                                               cov0=None, mu0=None, part_attr='chunks', mode='test',
                                               alphas=[alphas[0]], single_alpha=True, normalpha=False,
                                               nboots=1, corrmin=.2, singcutoff=1e-10, joined=None,
                                               use_corr=True)
    print 'language: ' + str(np.mean(lres))

    pwts, _, pres, pceil = bsr.bootstrap_ridge(ds=rds[pidx], des=pdes, chunklen=1, nchunks=1,
                                               cov0=None, mu0=None, part_attr='chunks', mode='test',
                                               alphas=[alphas[1]], single_alpha=True, normalpha=False,
                                               nboots=1, corrmin=.2, singcutoff=1e-10, joined=None,
                                               use_corr=True)

    # pictures within
    print 'pictures: ' + str(np.mean(pres))
    if write:
        map2nifti(thisDS, dataset.vstack([lres, pres])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_la_' + str(alphas[0]) + '_pa_' + str(alphas[1]) + '_corrs.nii.gz'))
        map2nifti(thisDS, dataset.vstack([lwts, pwts])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_la_' + str(alphas[0]) + '_pa_' + str(alphas[1]) + '_wts.nii.gz'))
        map2nifti(thisDS, dataset.vstack([lceil, pceil])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_la_' + str(alphas[0]) + '_pa_' + str(alphas[1]) + '_ceiling.nii.gz'))

    for t in testContrast:
        tstr = '+'.join(t)
        lcorr = lmvpa.testmodel(wts=lwts, des=ldes, ds=rds[lidx], tc=cp.copy(t), use_corr=True)
        pcorr = lmvpa.testmodel(wts=pwts, des=pdes, ds=rds[pidx], tc=cp.copy(t), use_corr=True)
        if write:
            map2nifti(thisDS, dataset.vstack([lcorr, pcorr])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + tstr +
                                      '_ridge_la_' + str(alphas[0]) + '_pa_' + str(alphas[1]) + '_test_corrs.nii.gz'))

    del lres, pres, lwts, pwts, lceil, pceil
    crossSet = thisDS.copy()
    crossSet.chunks[lidx] = 1
    crossSet.chunks[pidx] = 2
    # cwts, cres, cceil = bsr.ridge(rds[pidx], pdes, mu0=mus, cov0=covarmat,
    #                                             part_attr='chunks', mode='test', alphas=alphas[0], single_alpha=True,
    #                                             normalpha=False, corrmin=.2, singcutoff=1e-10, joined=None,
    #                                             use_corr=True)
    cwts, _, cres, cceil = bsr.bootstrap_ridge(ds=crossSet, des=des, chunklen=1, nchunks=1,
                                               cov0=None, mu0=None, part_attr='chunks', mode='test',
                                               alphas=[alphas[2]], single_alpha=True, normalpha=False,
                                               nboots=1, corrmin=.2, singcutoff=1e-10, joined=None,
                                               use_corr=True)
    for t in testContrast:
        tstr = '+'.join(t)
        ccorr = lmvpa.testmodel(wts=cwts, des=des, ds=crossSet, tc=cp.copy(t), use_corr=True)
        if write:
            map2nifti(thisDS, ccorr[0]) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + tstr +
                                      '_P2L_ridge_alpha_' + str(alphas[2]) + '_test_corr.nii.gz'))
            map2nifti(thisDS, ccorr[1]) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + tstr +
                                      '_L2P_ridge_alpha_' + str(alphas[2]) + '_test_corr.nii.gz'))
    print 'cross: ' + str(np.mean(cres))
    if write:
        map2nifti(thisDS, cres[0]).to_filename(
        os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                     '_P2L_ridge_alpha_' + str(alphas[2]) + '_corr.nii.gz'))
        map2nifti(thisDS, cres[1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                         '_L2P_ridge_alpha_' + str(alphas[2]) + '_corr.nii.gz'))

        map2nifti(thisDS, cwts[cwts.chunks==1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                         '_P2L_ridge_alpha_' + str(alphas[2]) + '_wts.nii.gz'))
        map2nifti(thisDS, cwts[cwts.chunks==2]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                         '_L2P_ridge_alpha' + str(alphas[2]) + '_wts.nii.gz'))

        map2nifti(thisDS, cceil[0]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                         '_P2L_ridge_alpha_' + str(alphas[2]) + '_ceiling.nii.gz'))
        map2nifti(thisDS, cceil[1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                         '_L2P_ridge_alpha_' + str(alphas[2]) + '_ceiling.nii.gz'))
    del cres, cwts, cceil
Exemplo n.º 27
0
def execute_spm_multimodal_fmri_glm(data, reg_motion=False):
    reg_motion = reg_motion and 'realignment_parameters' in data

    # experimental paradigm meta-params
    stats_start_time = time.ctime()
    tr = 2.
    drift_model = 'Cosine'
    hrf_model = 'Canonical With Derivative'
    hfcut = 128.

    # make design matrices
    design_matrices = []
    for x in xrange(2):
        n_scans = data['func'][x].shape[-1]

        timing = scipy.io.loadmat(data['trials_ses%i' % (x + 1)],
                                  squeeze_me=True, struct_as_record=False)

        faces_onsets = timing['onsets'][0].ravel()
        scrambled_onsets = timing['onsets'][1].ravel()
        onsets = np.hstack((faces_onsets, scrambled_onsets))
        onsets *= tr  # because onsets were reported in 'scans' units
        conditions = ['faces'] * len(faces_onsets) + ['scrambled'] * len(
            scrambled_onsets)
        paradigm = EventRelatedParadigm(conditions, onsets)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)

        add_reg_names = None
        add_regs = None
        if reg_motion:
            add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']
            add_regs = data['realignment_parameters'][x]
            if isinstance(add_regs, basestring):
                add_regs = np.loadtxt(add_regs)
        design_matrix = make_dmtx(
            frametimes,
            paradigm, hrf_model=hrf_model,
            drift_model=drift_model, hfcut=hfcut,
            add_reg_names=add_reg_names,
            add_regs=add_regs
            )

        design_matrices.append(design_matrix)

    # specify contrasts
    contrasts = {}
    n_columns = len(design_matrix.names)
    for i in xrange(paradigm.n_conditions):
        contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

    # more interesting contrasts
    contrasts['faces-scrambled'] = contrasts['faces'] - contrasts['scrambled']
    contrasts['scrambled-faces'] = contrasts['scrambled'] - contrasts['faces']
    contrasts['effects_of_interest'] = (contrasts['faces']
                                        + contrasts['scrambled'])

    # we have the same contrasts over sessions, so let's replicate
    contrasts = dict((contrast_id, [contrast_val] * 2)
                     for contrast_id, contrast_val in contrasts.iteritems())

    # fit GLM
    print('\r\nFitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel([load_4D_img(sess_func)
                                for sess_func in data['func']],
                               [dmat.matrix for dmat in design_matrices],
                               mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(data['output_dir'], "mask.nii.gz")
    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute bg unto which activation will be projected
    anat_img = load_vol(data['anat'])

    anat = anat_img.get_data()

    if anat.ndim == 4:
        anat = anat[..., 0]

    anat_affine = anat_img.get_affine()

    print "Computing contrasts .."
    z_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, eff_map, var_map = fmri_glm.contrast(
            contrast_val,
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True,
            )

        # store stat maps to disk
        for dtype, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, eff_map, var_map]):
            map_dir = os.path.join(
                data['output_dir'], '%s_maps' % dtype)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if dtype == 'z':
                z_maps[contrast_id] = map_path

            print "\t\t%s map: %s" % (dtype, map_path)

    # do stats report
    data['stats_report_filename'] = os.path.join(data['reports_output_dir'],
                                                 "report_stats.html")
    contrasts = dict((contrast_id, contrasts[contrast_id])
                     for contrast_id in z_maps.keys())
    generate_subject_stats_report(
        data['stats_report_filename'],
        contrasts,
        z_maps,
        fmri_glm.mask,
        anat=anat,
        anat_affine=anat_affine,
        design_matrices=design_matrices,
        subject_id=data['subject_id'],
        cluster_th=15,  # we're only interested in 'large' clusters
        start_time=stats_start_time,

        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
        )

    ProgressReport().finish_dir(data['reports_output_dir'])

    print "\r\nStatistic report written to %s\r\n" % data[
        'stats_report_filename']

    return data
Exemplo n.º 28
0
_duration = 6
epoch_duration = _duration * tr
conditions = ['rest', 'active'] * 8
duration = epoch_duration * np.ones(len(conditions))
onset = np.linspace(0, (len(conditions) - 1) * epoch_duration, len(conditions))
paradigm = BlockParadigm(con_id=conditions, onset=onset, duration=duration)
hfcut = 2 * 2 * epoch_duration

# construct design matrix
nscans = len(subject_data.func[0])
frametimes = np.linspace(0, (nscans - 1) * tr, nscans)
drift_model = 'Cosine'
hrf_model = 'Canonical With Derivative'
design_matrix = make_dmtx(frametimes,
                          paradigm,
                          hrf_model=hrf_model,
                          drift_model=drift_model,
                          hfcut=hfcut)

# plot and save design matrix
ax = design_matrix.show()
ax.set_position([.05, .25, .9, .65])
ax.set_title('Design matrix')
dmat_outfile = os.path.join(subject_data.output_dir, 'design_matrix.png')
pl.savefig(dmat_outfile, bbox_inches="tight", dpi=200)

# specify contrasts
contrasts = {}
n_columns = len(design_matrix.names)
for i in xrange(paradigm.n_conditions):
    contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]
Exemplo n.º 29
0
def make_designmat(ds,
                   eorig,
                   time_attr='time_coords',
                   condition_attr='targets',
                   design_kwargs=None,
                   regr_attrs=None):
    # make glm regressors for all attributes. so loop through condition_attr and add them all...
    import copy
    from nipy.modalities.fmri.design_matrix import make_dmtx
    import numpy as np
    # Decide/derive condition attribute on which GLM will actually be done
    if isinstance(condition_attr, basestring):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    e = copy.deepcopy(eorig)  # since we are modifying in place
    glm_condition_attrs = []
    for i, con in enumerate(condition_attr):
        glm_condition_attr = 'regressors_' + str(con)
        glm_condition_attrs.append(glm_condition_attr)
        for ei in e:
            if glm_condition_attr in ei:
                raise ValueError(
                    "Event %s already has %s defined.  Should not happen.  "
                    "Choose another name if you defined it" %
                    (ei, glm_condition_attr))
            ei[glm_condition_attr] = \
                'glm_label_' + str(con) + '_' + '+'.join(str(ei[c]) for c in [con])

    evvars = events2dict(e)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    if design_kwargs is None:
        design_kwargs = {}
    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            regr = ds.sa[attr].value
            # add rudimentary dimension for easy hstacking later on
            if regr.ndim < 2:
                regr = regr[:, np.newaxis]
            if regr.shape[1] == 1:
                names.append(attr)
            else:
                #  add one per each column of the regressor
                for i in xrange(regr.shape[1]):
                    names.append("%s.%d" % (attr, i))
            regrs.append(regr)
        regrs = np.hstack(regrs)

        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack(
                (design_kwargs['add_regs'], regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names

    X = {}
    for ci, con in enumerate(condition_attr):
        # create paradigm
        if 'duration' in evvars:
            from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
            # NiPy considers everything with a duration as a block paradigm
            paradigm = BlockParadigm(con_id=evvars[glm_condition_attrs[ci]],
                                     onset=evvars['onset'],
                                     duration=evvars['duration'],
                                     **add_paradigm_kwargs)
        else:
            from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
            paradigm = EventRelatedParadigm(
                con_id=evvars[glm_condition_attrs[ci]],
                onset=evvars['onset'],
                **add_paradigm_kwargs)
        X[con] = make_dmtx(ds.sa[time_attr].value,
                           paradigm=paradigm,
                           **design_kwargs)
        for i, reg in enumerate(X[con].names):
            ds.sa[reg] = X[con].matrix[:, i]
        if con in ds.sa.keys():
            ds.sa.pop(con)

        for reg in ds.sa.keys():
            if str(con) + '0' in reg:
                ds.sa['glm_label_probe'] = ds.sa.pop(reg)

    # concatenate X... add chunk regressors...
    # if 'chunks' in ds.sa.keys():
    #     for i in ds.sa['chunks'].unique:
    #         ds.sa['glm_label_chunks' + str(i)] = np.array(ds.sa['chunks'].value == i, dtype=np.int)
    return X, ds
Exemplo n.º 30
0
def _preprocess_and_analysis_subject(subject_data,
                                     slicer='z',
                                     cut_coords=6,
                                     threshold=3.,
                                     cluster_th=15,
                                     **preproc_params):
    """
    Preprocesses the subject's data and then fits a (mass-univariate) GLM on it.

    """

    # sanitize run_ids: for example,
    # Sub14/BOLD/Run_02/fMR09029-0004-00010-000010-01.nii is garbage
    run_ids = range(9)
    if subject_data['subject_id'] == "Sub14":
        run_ids = [0] + range(2, 9)
        subject_data['func'] = [subject_data['func'][0]
                                ] + subject_data['func'][2:]
        subject_data['session_id'] = [subject_data['session_id'][0]
                                      ] + subject_data['session_id'][2:]

    # sanitize subject output dir
    if 'output_dir' not in subject_data:
        subject_data['output_dir'] = os.path.join(output_dir,
                                                  subject_data['subject_id'])

    # preprocess the data
    subject_data = do_subject_preproc(subject_data, **preproc_params)
    # chronometry
    stats_start_time = pretty_time()

    # to-be merged lists, one item per run
    paradigms = []
    frametimes_list = []
    design_matrices = []  # one design matrix per run
    list_of_contrast_dicts = []  # one dict per run
    n_scans = []
    for run_id in run_ids:
        _n_scans = len(subject_data.func[run_id])
        n_scans.append(_n_scans)

        # make paradigm
        paradigm = make_paradigm(getattr(subject_data, 'timing')[run_id])

        # make design matrix
        tr = 2.
        drift_model = 'Cosine'
        hrf_model = 'Canonical With Derivative'
        hfcut = 128.
        frametimes = np.linspace(0, (_n_scans - 1) * tr, _n_scans)
        design_matrix = make_dmtx(
            frametimes,
            paradigm,
            hrf_model=hrf_model,
            drift_model=drift_model,
            hfcut=hfcut,
            add_regs=np.loadtxt(
                getattr(subject_data, 'realignment_parameters')[run_id]),
            add_reg_names=[
                'Translation along x axis', 'Translation along y axis',
                'Translation along z axis', 'Rotation along x axis',
                'Rotation along y axis', 'Rotation along z axis'
            ])

        # import matplotlib.pyplot as plt
        # design_matrix.show()
        # plt.show()

        paradigms.append(paradigm)
        design_matrices.append(design_matrix)
        frametimes_list.append(frametimes)

        # specify contrasts
        contrasts = {}
        n_columns = len(design_matrix.names)
        for i in xrange(paradigm.n_conditions):
            contrasts['%s' %
                      design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

        # more interesting contrasts"""
        contrasts['Famous-Unfamiliar'] = (contrasts['Famous']
                                          - contrasts['Unfamiliar'])
        contrasts['Unfamiliar-Famous'] = -contrasts['Famous-Unfamiliar']
        contrasts['Famous-Scrambled'] = (contrasts['Famous']
                                         - contrasts['Scrambled'])
        contrasts['Scrambled-Famous'] = -contrasts['Famous-Scrambled']
        contrasts['Unfamiliar-Scrambled'] = (contrasts['Unfamiliar']
                                             - contrasts['Scrambled'])
        contrasts['Scrambled-Unfamiliar'] = -contrasts['Unfamiliar-Scrambled']

        list_of_contrast_dicts.append(contrasts)

    # important maps
    z_maps = {}
    effects_maps = {}

    # fit GLM
    print('\r\nFitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel(
        [nibabel.concat_images(sess_func) for sess_func in subject_data.func],
        [design_matrix.matrix for design_matrix in design_matrices],
        mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    print "... done.\r\n"

    # save computed mask
    mask_path = os.path.join(subject_data.output_dir, "mask.nii.gz")

    print "Saving mask image to %s ..." % mask_path
    nibabel.save(fmri_glm.mask, mask_path)
    print "... done.\r\n"

    # replicate contrasts across runs
    contrasts = dict(
        (cid, [contrasts[cid] for contrasts in list_of_contrast_dicts])
        for cid, cval in contrasts.iteritems())

    # compute effects
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, eff_map = fmri_glm.contrast(contrast_val,
                                           con_id=contrast_id,
                                           output_z=True,
                                           output_stat=False,
                                           output_effects=True,
                                           output_variance=False)

        # store stat maps to disk
        for map_type, out_map in zip(['z', 'effects'], [z_map, eff_map]):
            map_dir = os.path.join(subject_data.output_dir,
                                   '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(map_dir, '%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path

            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.iteritems())

    # do stats report
    stats_report_filename = os.path.join(
        getattr(subject_data, 'reports_output_dir', subject_data.output_dir),
        "report_stats.html")
    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        threshold=threshold,
        cluster_th=cluster_th,
        slicer=slicer,
        cut_coords=cut_coords,
        design_matrices=design_matrices,
        subject_id=subject_data.subject_id,
        start_time=stats_start_time,
        title="GLM for subject %s" % subject_data.subject_id,

        # additional ``kwargs`` for more informative report
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        drift_model=drift_model,
        hrf_model=hrf_model,
        paradigm=dict(("Run_%02i" % (run_id + 1), paradigms[run_id])
                      for run_id in run_ids),
        frametimes=dict(("Run_%02i" % (run_id + 1), frametimes_list[run_id])
                        for run_id in run_ids),
        # fwhm=fwhm
    )

    ProgressReport().finish_dir(subject_data.output_dir)
    print "\r\nStatistic report written to %s\r\n" % stats_report_filename

    return contrasts, effects_maps, z_maps, mask_path
Exemplo n.º 31
0
def first_level(subject_dic):
    # experimental paradigm meta-params
    stats_start_time = time.ctime()
    tr = 2.4
    drift_model = 'blank'
    hrf_model = 'canonical'  # hemodynamic response function
    hfcut = 128.
    n_scans = 128

    # make design matrices
    mask_images = []
    design_matrices = []
    fmri_files = subject_dic['func']

    for x in xrange(len(fmri_files)):
        paradigm = paradigm_contrasts.localizer_paradigm()

        # build design matrix
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        design_matrix = make_dmtx(
            frametimes,
            paradigm,
            hrf_model=hrf_model,
            drift_model=drift_model,
            hfcut=hfcut,
        )
        design_matrices.append(design_matrix)

    # Specify contrasts
    contrasts = paradigm_contrasts.localizer_contrasts(design_matrix)

    # create output directory
    subject_session_output_dir = os.path.join(subject_dic['output_dir'],
                                              'res_stats')

    if not os.path.exists(subject_session_output_dir):
        os.makedirs(subject_session_output_dir)

    # Fit GLM
    print 'Fitting a GLM (this takes time)...'
    fmri_glm = FMRILinearModel(
        fmri_files,
        [design_matrix.matrix for design_matrix in design_matrices],
        mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(subject_session_output_dir, "mask.nii.gz")
    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)
    mask_images.append(mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * 1,
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True)

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                     [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(subject_session_output_dir,
                                   '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir,
                '%s%s.nii.gz' % (subject_dic['subject_id'], contrast_id))
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    # do stats report
    anat_img = nibabel.load(subject_dic['anat'])
    stats_report_filename = os.path.join(subject_session_output_dir,
                                         "report_stats.html")

    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        threshold=2.3,
        cluster_th=15,
        anat=anat_img.get_data(),
        anat_affine=anat_img.get_affine(),
        design_matrices=design_matrices,
        subject_id="sub001",
        start_time=stats_start_time,
        title="GLM for subject %s" % subject_dic['session_id'],

        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
    )

    ProgressReport().finish_dir(subject_session_output_dir)
    print "Statistic report written to %s\r\n" % stats_report_filename
    return z_maps
Exemplo n.º 32
0
            fmri_series = [
                os.path.join(fmri_dir, '%s_series_%s_lh_smooth5.gii' %
                             (subject, session)) for session in sessions]
        elif side == 'right':
            fmri_series = [
                os.path.join(fmri_dir, '%s_series_%s_rh_smooth5.gii' %
                             (subject, session)) for session in sessions]

        # compute the mask
        if side is False:
            mean_img = glob.glob(os.path.join(fmri_dir, 'mean*.nii'))[0]
            mask_array = compute_mask_files(mean_img, epi_mask, True,
                                            inf_threshold, sup_threshold)[0]

        # get the contrasts
        n_reg = make_dmtx(frametimes, add_regs=reg_matrix,
                          drift_model=drift_model, hfcut=hfcut).matrix.shape[1]
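
        # (the throwaway design matrix above is built only to count its
        # columns, so that the contrast vectors from make_contrasts can be
        # sized to match the per-session design matrices created below)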

        contrasts, all_reg = make_contrasts(sessions, n_reg)
        contrast_obj = {}

        for (sess, (session, fmri_data)) in enumerate(zip(
                sessions, fmri_series)):
            # create design matrices
            reg = all_reg[2 * sess: 2 * sess + 2] # fixme
            design_matrix = make_dmtx(
                frametimes, add_regs=reg_matrix, add_reg_names=reg,
                drift_model=drift_model, hfcut=hfcut)

            # plot the design matrix
            ax = design_matrix.show()
            ax.set_position([.05, .25, .9, .65])
Exemplo n.º 33
0
def first_level(subject_dic):
    # experimental paradigm meta-params
    stats_start_time = time.ctime()
    tr = 2.4
    drift_model = 'blank'
    hrf_model = 'canonical'  # hemodynamic response function
    hfcut = 128.
    n_scans = 128

    # make design matrices
    mask_images = []
    design_matrices = []
    fmri_files = subject_dic['func']

    for x in xrange(len(fmri_files)):
        paradigm = paradigm_contrasts.localizer_paradigm()

        # build design matrix
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        design_matrix = make_dmtx(
            frametimes,
            paradigm, hrf_model=hrf_model,
            drift_model=drift_model, hfcut=hfcut,
            )
        design_matrices.append(design_matrix)

    # Specify contrasts
    contrasts = paradigm_contrasts.localizer_contrasts(design_matrix)

    # create output directory
    subject_session_output_dir = os.path.join(subject_dic['output_dir'],
                                              'res_stats')

    if not os.path.exists(subject_session_output_dir):
        os.makedirs(subject_session_output_dir)

    # Fit GLM
    print 'Fitting a GLM (this takes time)...'
    fmri_glm = FMRILinearModel(fmri_files,
                               [design_matrix.matrix
                                for design_matrix in design_matrices],
                               mask='compute'
                               )
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(subject_session_output_dir,
                             "mask.nii.gz")
    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)
    mask_images.append(mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * 1,
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True
            )

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                     [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(
                subject_session_output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir,
                '%s%s.nii.gz' % (subject_dic['subject_id'], contrast_id))
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    # do stats report
    anat_img = nibabel.load(subject_dic['anat'])
    stats_report_filename = os.path.join(subject_session_output_dir,
                                         "report_stats.html")
                                         
    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        threshold=2.3,
        cluster_th=15,
        anat=anat_img.get_data(),
        anat_affine=anat_img.get_affine(),
        design_matrices=design_matrices,
        subject_id="sub001",
        start_time=stats_start_time,
        title="GLM for subject %s" % subject_dic['session_id'],
    
        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
        )
    
    ProgressReport().finish_dir(subject_session_output_dir)
    print "Statistic report written to %s\r\n" % stats_report_filename
    return z_maps
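
# Usage sketch: a minimal, hypothetical call to first_level(). The
# subject_dic keys ('func', 'anat', 'subject_id', 'session_id',
# 'output_dir') are inferred from the function body above; the paths are
# placeholders, not real data.
subject_dic = {
    'func': ['/data/sub001/run1_bold.nii.gz'],
    'anat': '/data/sub001/anat.nii.gz',
    'subject_id': 'sub001',
    'session_id': 'session1',
    'output_dir': '/results/sub001',
}
z_maps = first_level(subject_dic)
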
Exemplo n.º 34
0
def glm_nipy(fmri_data,
             contrasts=None,
             hrf_model='Canonical',
             drift_model='Cosine',
             hfcut=128,
             residuals_model='spherical',
             fit_method='ols',
             fir_delays=[0],
             rescale_results=False,
             rescale_factor=None):
    """
    Perform a GLM analysis on fMRI data using the implementation of Nipy.

    Args:
        fmri_data (pyhrf.core.FmriData): the input fMRI data defining the
            paradigm and the measured 3D+time signal.
        contrasts (dict): keys are contrast labels and values are arithmetic
            expressions involving regressor names. Valid names are:
            * names of experimental conditions as defined in fmri_data
            * constant
        hrf_model: "Canonical", "Canonical with Derivative", "FIR"
        residuals_model: "spherical", "ar1"
        fit_method: "ols", "kalman" (If residuals_model is "ar1" then method
            is set to "kalman" and this argument is ignored)
        fir_delays: list of integers indicating the delay of each FIR coefficient
                    (in terms of scans). Eg if TR = 2s. and we want a FIR
                    duration of 20s.: fir_delays=range(10)
    Returns:
        (glm instance, design matrix, dict of contrasts of objects)

    Examples:
    >>> from pyhrf.core import FmriData
    >>> from pyhrf.glm import glm_nipy
    >>> g,dmtx,con = glm_nipy(FmriData.from_vol_ui())
    >>> g,dmtx,con = glm_nipy(FmriData.from_vol_ui(), \
                              contrasts={'A-V':'audio-video'})
    """

    paradigm = fmri_data.paradigm.to_nipy_paradigm()

    # BOLD data
    Y = fmri_data.bold.T
    n_scans = Y.shape[1]

    # Design matrix
    frametimes = np.linspace(0, (n_scans - 1) * fmri_data.tr, n_scans)
    design_matrix = dm.make_dmtx(frametimes,
                                 paradigm,
                                 hrf_model=hrf_model,
                                 drift_model=drift_model,
                                 hfcut=hfcut,
                                 fir_delays=fir_delays)

    ns, nr = design_matrix.matrix.shape
    logger.info('Design matrix built with %d regressors:', nr)
    for rn in design_matrix.names:
        logger.info('    - %s', rn)

    # GLM fit
    my_glm = glm.glm()
    logger.info('Fit GLM - method: %s, residual model: %s', fit_method,
                residuals_model)
    my_glm.fit(Y.T,
               design_matrix.matrix,
               method=fit_method,
               model=residuals_model)

    from pyhrf.tools import map_dict
    from pyhrf.paradigm import contrasts_to_spm_vec

    if rescale_results:
        if rescale_factor is None:
            # Rescale by the norm of each regressor in the design matrix
            dm_reg_norms = (design_matrix.matrix ** 2).sum(0) ** .5
            logger.info(
                'GLM results (beta and con effects) are '
                'rescaled by reg norm. Weights: %s ', str(dm_reg_norms))

            for ib in xrange(my_glm.beta.shape[0]):
                my_glm.beta[ib] = my_glm.beta[ib] * dm_reg_norms[ib]

        else:
            logger.info('GLM results (beta and con effects) are '
                        'rescaled by input scale factor.')

            # Use input rescale factors:
            for ib in xrange(rescale_factor.shape[0]):
                my_glm.beta[ib] = my_glm.beta[ib] * rescale_factor[ib]
                # TOCHECK: nvbeta seems to be a covariance matrix between
                # regressors -> we don't get position-specific variances...
                # my_glm.nvbeta[ib, :] *= rescale_factor[ib] ** 2

    if contrasts is not None:
        con_vectors = contrasts_to_spm_vec(design_matrix.names, contrasts)
        # if rescale_results:
        #     for con_vec in con_vectors.itervalues():
        #         con_vec *= dm_reg_norms
        contrast_result = map_dict(my_glm.contrast, con_vectors)
    else:
        contrast_result = None

    return my_glm, design_matrix, contrast_result

    # actually: not possible to compute PPM from glm results;
    # should relaunch estimation with proper model under SPM
    # def PPMcalculus_glmWN(beta, var_beta, dm, threshold_value):
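
# Usage sketch: a hypothetical call to glm_nipy() with AR(1) residuals.
# Per the docstring above, the 'kalman' fit method goes with 'ar1';
# FmriData.from_vol_ui() is pyhrf's default volumic demo dataset.
from pyhrf.core import FmriData
g, dmtx, con = glm_nipy(FmriData.from_vol_ui(),
                        residuals_model='ar1', fit_method='kalman',
                        contrasts={'A-V': 'audio-video'})
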
Exemplo n.º 35
0
        img = nb.load(smooth_file)

        #-----------------------------------------------------------------
        # Construct a design matrix for each test
        #-----------------------------------------------------------------
        print('  Make design matrix...')
        print('    Conditions:\n      {}'.format(conditions))
        print('    Amplitudes:\n      {}'.format(amplitudes))
        print('    Onsets:\n      {}'.format(onsets))
        print('    Durations:\n      {}'.format(durations))
        paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                                 duration=durations, amplitude=amplitudes)
        frametimes = np.linspace(0, n_images-1, n_images)

        if ntest < 3:
            dmtx = make_dmtx(frametimes, paradigm, hrf_model='FIR',
                             drift_model='polynomial', drift_order=2, hfcut=np.inf)
        else:
            dmtx = make_dmtx(frametimes, paradigm, hrf_model='FIR', hfcut=np.inf)
        design_matrix = dmtx.matrix

        # Plot the design matrix
        if plot_design_matrix:
            fig1 = mp.figure(figsize=(10, 6))
            dmtx.show()
            mp.title(desc)
            fig1_file = os.path.join(out_path, label + 'design_matrix_test' +
                                     str(ntest) + '.png')
            mp.savefig(fig1_file)

        #-----------------------------------------------------------------
        # Mean-scale, de-mean and multiply data by 100
Exemplo n.º 36
0
File: glm.py Project: ainafp/pyhrf
def glm_nipy(fmri_data, contrasts=None, hrf_model='Canonical',
             drift_model='Cosine', hfcut=128,
             residuals_model='spherical', fit_method='ols',
             fir_delays=[0],
             rescale_results=False, rescale_factor=None):
    """
    Perform a GLM analysis on fMRI data using the implementation of Nipy.

    Args:
        fmri_data (pyhrf.core.FmriData): the input fMRI data defining the
            paradigm and the measured 3D+time signal.
        contrasts (dict): keys are contrast labels and values are arithmetic
            expressions involving regressor names. Valid names are:
            * names of experimental conditions as defined in fmri_data
            * constant
        hrf_model: "Canonical", "Canonical with Derivative", "FIR"
        residuals_model: "spherical", "ar1"
        fit_method: "ols", "kalman" (If residuals_model is "ar1" then method
            is set to "kalman" and this argument is ignored)
        fir_delays: list of integers indicating the delay of each FIR coefficient
                    (in terms of scans). Eg if TR = 2s. and we want a FIR
                    duration of 20s.: fir_delays=range(10)
    Returns:
        (glm instance, design matrix, dict of contrasts of objects)

    Examples:
    >>> from pyhrf.core import FmriData
    >>> from pyhrf.glm import glm_nipy
    >>> g,dmtx,con = glm_nipy(FmriData.from_vol_ui())
    >>> g,dmtx,con = glm_nipy(FmriData.from_vol_ui(), \
                              contrasts={'A-V':'audio-video'})
    """

    paradigm = fmri_data.paradigm.to_nipy_paradigm()

    # BOLD data
    Y = fmri_data.bold.T
    n_scans = Y.shape[1]

    # Design matrix
    frametimes = np.linspace(0, (n_scans - 1) * fmri_data.tr, n_scans)
    design_matrix = dm.make_dmtx(frametimes, paradigm,
                                 hrf_model=hrf_model,
                                 drift_model=drift_model, hfcut=hfcut,
                                 fir_delays=fir_delays)

    ns, nr = design_matrix.matrix.shape
    logger.info('Design matrix built with %d regressors:', nr)
    for rn in design_matrix.names:
        logger.info('    - %s', rn)

    # GLM fit
    my_glm = glm.glm()
    logger.info('Fit GLM - method: %s, residual model: %s', fit_method,
                residuals_model)
    my_glm.fit(Y.T, design_matrix.matrix, method=fit_method,
               model=residuals_model)

    from pyhrf.tools import map_dict
    from pyhrf.paradigm import contrasts_to_spm_vec

    if rescale_results:
        if rescale_factor is None:
            # Rescale by the norm of each regressor in the design matrix
            dm_reg_norms = (design_matrix.matrix ** 2).sum(0) ** .5
            logger.info('GLM results (beta and con effects) are '
                        'rescaled by reg norm. Weights: %s ',
                        str(dm_reg_norms))

            for ib in xrange(my_glm.beta.shape[0]):
                my_glm.beta[ib] = my_glm.beta[ib] * dm_reg_norms[ib]

        else:
            logger.info('GLM results (beta and con effects) are '
                        'rescaled by input scale factor.')

            # Use input rescale factors:
            for ib in xrange(rescale_factor.shape[0]):
                my_glm.beta[ib] = my_glm.beta[ib] * rescale_factor[ib]
                # TOCHECK: nvbeta seems to be a covariance matrix between
                # regressors -> we don't get position-specific variances...
                # my_glm.nvbeta[ib, :] *= rescale_factor[ib] ** 2

    if contrasts is not None:
        con_vectors = contrasts_to_spm_vec(design_matrix.names, contrasts)
        # if rescale_results:
        #     for con_vec in con_vectors.itervalues():
        #         con_vec *= dm_reg_norms
        contrast_result = map_dict(my_glm.contrast, con_vectors)
    else:
        contrast_result = None

    return my_glm, design_matrix, contrast_result

# actually: not possible to compute PPM from glm results;
# should relaunch estimation with proper model under SPM
# def PPMcalculus_glmWN(beta, var_beta, dm, threshold_value):
Exemplo n.º 37
0
def runsub(sub, thisContrast, thisContrastStr,
           filterLen, filterOrd,
           paramEst, chunklen, alphas=np.logspace(0, 3, 20), debug=False, write=False, roi='grayMatter'):
    thisSub = {sub: subList[sub]}
    mc_params = lmvpa.loadmotionparams(paths, thisSub)
    beta_events = lmvpa.loadevents(paths, thisSub)
    dsdict = lmvpa.loadsubdata(paths, thisSub, m=roi, c='trial_type')
    thisDS = dsdict[sub]

    # savitsky golay filtering
    sg.sg_filter(thisDS, filterLen, filterOrd)
    # gallant group zscores before regression.

    # zscore w.r.t. rest trials
    # zscore(thisDS, param_est=('targets', ['rest']), chunks_attr='chunks')
    # zscore entire set. if done chunk-wise, there is no double-dipping (since we leave a chunk out at a time).
    zscore(thisDS, chunks_attr='chunks')

    # kay method: leave out a model run, use it to fit an HRF for each voxel
    # huth method: essentially use FIR
    # mumford method: deconvolution with canonical HRF

    # refit events and regress...
    # get timing data from timing files
    # rds, events = lmvpa.amendtimings(thisDS.copy(), beta_events[sub])
    rds, events = lmvpa.amendtimings(thisDS.copy(), beta_events[sub], contrasts) # adding features

    # we can model out motion and just not use those betas.
    # Ridge
    if isinstance(thisContrast, basestring):
        thisContrast = [thisContrast]
    # instead of binarizing each one, make them parametric
    desX, rds = lmvpa.make_designmat(rds, events, time_attr='time_coords', condition_attr=thisContrast,
                                     design_kwargs={'hrf_model': 'canonical', 'drift_model': 'blank'},
                                     regr_attrs=None)
    # want to collapse ap and cr, but have anim separate
    desX['motion'] = make_dmtx(rds.sa['time_coords'].value, paradigm=None,
                               add_regs=mc_params[sub], drift_model='blank')

    des = lmvpa.make_parammat(desX, hrf='canonical', zscore=True)

    # set chunklen and nchunks
    # split by language and pictures
    lidx = thisDS.chunks < thisDS.sa['chunks'].unique[len(thisDS.sa['chunks'].unique) / 2]
    pidx = thisDS.chunks >= thisDS.sa['chunks'].unique[len(thisDS.sa['chunks'].unique) / 2]
    ldes = cp.copy(des)
    pdes = cp.copy(des)

    ldes.matrix = ldes.matrix[lidx]
    pdes.matrix = pdes.matrix[pidx]
    nchunks = int(len(thisDS)*paramEst / chunklen)
    nboots = 50
    covarmat = None
    mus = None
    lwts, lalphas, lres, lceil = bsr.bootstrap_ridge(
        rds[lidx], ldes, chunklen=chunklen, nchunks=nchunks,
        cov0=covarmat, mu0=mus, part_attr='chunks', mode='test',
        alphas=alphas, single_alpha=True, normalpha=False,
        nboots=nboots, corrmin=.2, singcutoff=1e-10, joined=None,
        plot=debug, use_corr=True)

    pwts, palphas, pres, pceil = bsr.bootstrap_ridge(
        rds[pidx], pdes, chunklen=chunklen, nchunks=nchunks,
        part_attr='chunks', mode='test',
        alphas=alphas, single_alpha=True, normalpha=False,
        nboots=nboots, corrmin=.2, singcutoff=1e-10, joined=None,
        plot=debug, use_corr=True)
    print 'language: ' + str(np.mean(lres))

    # pictures within
    print 'pictures: ' + str(np.mean(pres))

    # need to change outstring
    if write:
        from mvpa2.base import dataset
        map2nifti(thisDS, dataset.vstack([lres, pres])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_corrs.nii.gz'))
        map2nifti(thisDS, dataset.vstack([lwts, pwts])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_weights.nii.gz'))
        map2nifti(thisDS, dataset.vstack([lalphas, palphas])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_alphas.nii.gz'))
        map2nifti(thisDS, dataset.vstack([lceil, pceil])) \
            .to_filename(os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr +
                                      '_ridge_ceiling.nii.gz'))

    del lres, pres, lwts, pwts, lalphas, palphas, lceil, pceil
    crossSet = thisDS.copy()
    crossSet.chunks[lidx] = 1
    crossSet.chunks[pidx] = 2
    cwts, calphas, cres, cceil = bsr.bootstrap_ridge(
        crossSet, des, chunklen=chunklen, nchunks=nchunks,
        part_attr='chunks', mode='test',
        alphas=alphas, single_alpha=True, normalpha=False,
        nboots=nboots, corrmin=.2, singcutoff=1e-10, joined=None,
        use_corr=True)
    print 'cross: ' + str(np.mean(cres))
    if write:
        map2nifti(thisDS, cres[0]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_P2L_ridge_corr.nii.gz'))
        map2nifti(thisDS, cres[1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_L2P_ridge_corr.nii.gz'))

        map2nifti(thisDS, cwts[0]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_P2L_ridge_weights.nii.gz'))
        map2nifti(thisDS, cwts[1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_L2P_ridge_weights.nii.gz'))

        map2nifti(thisDS, calphas[calphas.chunks==1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_P2L_ridge_alphas.nii.gz'))
        map2nifti(thisDS, calphas[calphas.chunks==2]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_L2P_ridge_alphas.nii.gz'))

        map2nifti(thisDS, cceil[0]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_P2L_ridge_ceiling.nii.gz'))
        map2nifti(thisDS, cceil[1]).to_filename(
            os.path.join(paths[0], 'Maps', 'Encoding', sub + '_' + roi + '_' + thisContrastStr + '_L2P_ridge_ceiling.nii.gz'))
    del cres, cwts, calphas, cceil
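
# Usage sketch: a hypothetical invocation of runsub(). The module-level
# names it relies on (subList, paths, contrasts, ...) are defined elsewhere
# in this project; all argument values below are placeholders.
runsub('sub001', 'anim', 'animacy',
       filterLen=49, filterOrd=3, paramEst=.25, chunklen=20,
       debug=False, write=True, roi='grayMatter')
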
Exemplo n.º 38
0
            empty_conditions.append(c)
        elif len(conditions[c]) < 2:
            pseudo_empty_conditions.append(c)
        condition += [c] * len(conditions[c])
        onset = np.hstack([onset, conditions[c]])
        duration += [durations[c]] * len(conditions[c])

    paradigm = BlockParadigm(con_id=condition,
                             onset=onset,
                             duration=duration)

    frametimes = np.linspace(0, (N_SCANS - 1) * TR / 1000., num=N_SCANS)

    design_mat = design_matrix.make_dmtx(frametimes, paradigm,
                                         hrf_model='Canonical with derivative',
                                         add_regs=regressors,
                                         drift_model='Cosine',
                                         hfcut=128)

    # Fill empty conditions
    for c in empty_conditions:
        if c == 'anticip_missed_largewin':
            ind = 6
        elif c == 'feedback_missed_largewin':
            ind = 18
        elif c == 'anticip_missed_smallwin':
            ind = 10
        elif c == 'feedback_missed_smallwin':
            ind = 20
        elif c == 'anticip_missed_nowin':
Exemplo n.º 39
0
def fit_event_hrf_model(ds,
                        events,
                        time_attr,
                        condition_attr='targets',
                        design_kwargs=None,
                        glmfit_kwargs=None,
                        regr_attrs=None,
                        return_model=False):
    """Fit a GLM with HRF regressor and yield a dataset with model parameters

    A univariate GLM is fitted for each feature and model parameters are
    returned as samples. Model parameters are returned for each regressor in
    the design matrix. Using functionality from NiPy, design matrices can be
    generated from event definitions with a variety of customizations (HRF
    model, confound regressors, ...).

    Events need to be specified as a list of dictionaries
    (see :class:`~mvpa2.misc.support.Event` for a helper class). Each
    dictionary contains all relevant attributes to describe an event.

    HRF event model details
    -----------------------

    The event specifications are used to generate a design matrix for all
    present conditions. In addition to the mandatory ``onset`` information
    each event definition needs to include a label in order to associate
    individual events to conditions (the design matrix will contain at least
    one regressor for each condition). The name of this label attribute must
    be specified too (see ``condition_attr`` argument).

    NiPy is used to generate the actual design matrix.  It is required to
    specify a dataset sample attribute that contains time stamps for all input
    data samples (see ``time_attr``).  NiPy operation could be customized (see
    ``design_kwargs`` argument). Additional regressors from sample attributes
    of the input dataset can be included in the design matrix (see
    ``regr_attrs``).

    The actual GLM fit is also performed by NiPy and can be fully customized
    (see ``glmfit_kwargs``).

    Parameters
    ----------
    ds : Dataset
      The samples of this input dataset have to be in ascending temporal
      order.
    events : list
      Each event definition has to specify ``onset`` and ``duration``. All
      other attributes will be passed on to the sample attributes collection of
      the returned dataset.
    time_attr : str
      Attribute with dataset sample time stamps.
      Its values will be treated as in-the-same-unit and are used to
      determine corresponding samples from real-value onset and duration
      definitions. For HRF modeling this argument is mandatory.
    condition_attr : str
      Name of the event attribute with the condition labels.
      Can be a list of such names (e.g. ['targets', 'chunks']), in which
      case the combination of their values constitutes a condition.
    design_kwargs : dict
      Arbitrary keyword arguments for NiPy's make_dmtx() used for design matrix
      generation. Choose HRF model, confound regressors, etc.
    glmfit_kwargs : dict
      Arbitrary keyword arguments for NiPy's GeneralLinearModel.fit() used for
      estimating model parameter. Choose fitting algorithm: OLS or AR1.
    regr_attrs : list
      List of dataset sample attribute names that shall be extracted from the
      input dataset and used as additional regressors in the design matrix.
    return_model : bool
      Flag whether to include the fitted GLM model in the returned dataset.
      For large input data this can be problematic, as the model may contain
      the residuals (same size as the input data), hence multiplying the
      memory demand. Off by default.

    Returns
    -------
    Dataset
      One sample for each regressor/condition in the design matrix is returned.
      The condition names are included as a sample attribute with the name
      specified by the ``condition_attr`` argument.  The actual design
      regressors are included as ``regressors`` sample attribute. If enabled,
      an instance with the fitted NiPy GLM results is included as a dataset
      attribute ``model``, and can be used for computing contrasts subsequently.

    Examples
    --------
    The documentation also contains an :ref:`example script
    <example_eventrelated>` showing a spatio-temporal analysis of fMRI data
    that involves this function.

    >>> from mvpa2.datasets import Dataset
    >>> ds = Dataset(np.random.randn(10, 25))
    >>> ds.sa['time_coords'] = np.linspace(0, 50, len(ds))
    >>> events = [{'onset': 2, 'duration': 4, 'condition': 'one'},
    ...           {'onset': 4, 'duration': 4, 'condition': 'two'}]
    >>> hrf_estimates = fit_event_hrf_model(
    ...                   ds, events,
    ...                   time_attr='time_coords',
    ...                   condition_attr='condition',
    ...                   design_kwargs=dict(drift_model='blank'),
    ...                   glmfit_kwargs=dict(model='ols'),
    ...                   return_model=True)
    >>> print hrf_estimates.sa.condition
    ['one' 'two']
    >>> print hrf_estimates.shape
    (2, 25)
    >>> len(hrf_estimates.a.model.get_mse())
    25

    Additional regressors used in GLM modeling are also available in a
    dataset attribute:

    >>> print hrf_estimates.a.add_regs.sa.regressor_names
    ['constant']
    """
    if externals.exists('nipy', raise_=True):
        from nipy.modalities.fmri.design_matrix import make_dmtx
        from mvpa2.mappers.glm import NiPyGLMMapper

    # Decide on the condition attribute on which the GLM will actually be done
    if isinstance(condition_attr, str):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    glm_condition_attr = 'regressor_names'  # actual regressors
    glm_condition_attr_map = dict([(con, dict()) for con in condition_attr])  #
    # to map back to original conditions
    events = copy.deepcopy(events)  # since we are modifying in place
    for event in events:
        if glm_condition_attr in event:
            raise ValueError("Event %s already has %s defined.  Should not "
                             "happen.  Choose another name if defined it" %
                             (event, glm_condition_attr))
        compound_label = event[glm_condition_attr] = \
            'glm_label_' + '+'.join(
                str(event[con]) for con in condition_attr)
        # and mapping back to original values, without str()
        # for each condition:
        for con in condition_attr:
            glm_condition_attr_map[con][compound_label] = event[con]

    evvars = _events2dict(events)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    # create paradigm
    if 'duration' in evvars:
        from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
        # NiPy considers everything with a duration as a block paradigm
        paradigm = BlockParadigm(con_id=evvars[glm_condition_attr],
                                 onset=evvars['onset'],
                                 duration=evvars['duration'],
                                 **add_paradigm_kwargs)
    else:
        from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
        paradigm = EventRelatedParadigm(con_id=evvars[glm_condition_attr],
                                        onset=evvars['onset'],
                                        **add_paradigm_kwargs)
    # create design matrix -- all kinds of fancy additional regr can be
    # auto-generated
    if design_kwargs is None:
        design_kwargs = {}

    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            regr = ds.sa[attr].value
            # add rudimentary dimension for easy hstacking later on
            if regr.ndim < 2:
                regr = regr[:, np.newaxis]
            if regr.shape[1] == 1:
                names.append(attr)
            else:
                #  add one per each column of the regressor
                for i in range(regr.shape[1]):
                    names.append("%s.%d" % (attr, i))
            regrs.append(regr)
        regrs = np.hstack(regrs)

        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack(
                (design_kwargs['add_regs'], regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names

    design_matrix = make_dmtx(ds.sa[time_attr].value, paradigm,
                              **design_kwargs)

    # push design into source dataset
    glm_regs = [(reg, design_matrix.matrix[:, i])
                for i, reg in enumerate(design_matrix.names)]

    # GLM
    glm = NiPyGLMMapper([],
                        glmfit_kwargs=glmfit_kwargs,
                        add_regs=glm_regs,
                        return_design=True,
                        return_model=return_model,
                        space=glm_condition_attr)

    model_params = glm(ds)

    # some regressors might be corresponding not to original condition_attr
    # so let's separate them out
    regressor_names = model_params.sa[glm_condition_attr].value
    condition_regressors = np.array([
        v in list(glm_condition_attr_map.values())[0] for v in regressor_names
    ])
    assert (condition_regressors.dtype == np.bool)
    if not np.all(condition_regressors):
        # some regressors do not correspond to conditions and would need
        # to be taken into a separate dataset
        model_params.a['add_regs'] = model_params[~condition_regressors]
        # then we process the rest
        model_params = model_params[condition_regressors]
        regressor_names = model_params.sa[glm_condition_attr].value

    # now define proper condition sa's
    for con, con_map in glm_condition_attr_map.items():
        model_params.sa[con] = [con_map[v] for v in regressor_names]
    model_params.sa.pop(glm_condition_attr)  # remove generated one
    return model_params
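
# Follow-up sketch: with return_model=True the fitted NiPy
# GeneralLinearModel is exposed as hrf_estimates.a.model and can be queried
# for contrasts. The vector below assumes the design columns are
# ('one', 'two', constant), as in the doctest above with
# design_kwargs=dict(drift_model='blank').
import numpy as np
contrast = hrf_estimates.a.model.contrast(np.array([1., -1., 0.]))
z_scores = contrast.z_score()  # one z-score per feature
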
Exemplo n.º 40
0
def _fit_hrf_event_model(
    ds, events, time_attr, condition_attr="targets", design_kwargs=None, glmfit_kwargs=None, regr_attrs=None
):
    if externals.exists("nipy", raise_=True):
        from nipy.modalities.fmri.design_matrix import make_dmtx
        from mvpa2.mappers.glm import NiPyGLMMapper

    # Decide on the condition attribute on which the GLM will actually be done
    if isinstance(condition_attr, basestring):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    glm_condition_attr = "regressor_names"  # actual regressors
    glm_condition_attr_map = dict([(con, dict()) for con in condition_attr])  #
    # to map back to original conditions
    events = copy.deepcopy(events)  # since we are modifying in place
    for event in events:
        if glm_condition_attr in event:
            raise ValueError(
                "Event %s already has %s defined.  Should not happen.  "
                "Choose another name if you defined it" % (event, glm_condition_attr)
            )
        compound_label = event[glm_condition_attr] = "glm_label_" + "+".join(str(event[con]) for con in condition_attr)
        # and mapping back to original values, without str()
        # for each condition:
        for con in condition_attr:
            glm_condition_attr_map[con][compound_label] = event[con]

    evvars = _events2dict(events)
    add_paradigm_kwargs = {}
    if "amplitude" in evvars:
        add_paradigm_kwargs["amplitude"] = evvars["amplitude"]
    # create paradigm
    if "duration" in evvars:
        from nipy.modalities.fmri.experimental_paradigm import BlockParadigm

        # NiPy considers everything with a duration as a block paradigm
        paradigm = BlockParadigm(
            con_id=evvars[glm_condition_attr], onset=evvars["onset"], duration=evvars["duration"], **add_paradigm_kwargs
        )
    else:
        from nipy.modalities.fmri.experimental_paradigm import EventRelatedParadigm

        paradigm = EventRelatedParadigm(con_id=evvars[glm_condition_attr], onset=evvars["onset"], **add_paradigm_kwargs)
    # create design matrix -- all kinds of fancy additional regr can be
    # auto-generated
    if design_kwargs is None:
        design_kwargs = {}
    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            names.append(attr)
            regr = ds.sa[attr].value
            # ensure a column vector so hstack yields (n_samples, n_regressors)
            if regr.ndim < 2:
                regr = regr[:, np.newaxis]
            regrs.append(regr)
        regrs = np.hstack(regrs)
        if "add_regs" in design_kwargs:
            design_kwargs["add_regs"] = np.hstack((design_kwargs["add_regs"], regrs))
        else:
            design_kwargs["add_regs"] = regrs
        if "add_reg_names" in design_kwargs:
            design_kwargs["add_reg_names"].extend(names)
        else:
            design_kwargs["add_reg_names"] = names
    design_matrix = make_dmtx(ds.sa[time_attr].value, paradigm, **design_kwargs)

    # push design into source dataset
    glm_regs = [(reg, design_matrix.matrix[:, i]) for i, reg in enumerate(design_matrix.names)]

    # GLM
    glm = NiPyGLMMapper(
        [],
        glmfit_kwargs=glmfit_kwargs,
        add_regs=glm_regs,
        return_design=True,
        return_model=True,
        space=glm_condition_attr,
    )

    model_params = glm(ds)

    # some regressors might be corresponding not to original condition_attr
    # so let's separate them out
    regressor_names = model_params.sa[glm_condition_attr].value
    condition_regressors = np.array([v in glm_condition_attr_map.values()[0] for v in regressor_names])
    assert condition_regressors.dtype == np.bool
    if not np.all(condition_regressors):
        # some regressors do not correspond to conditions and would need
        # to be taken into a separate dataset
        model_params.a["add_regs"] = model_params[~condition_regressors]
        # then we process the rest
        model_params = model_params[condition_regressors]
        regressor_names = model_params.sa[glm_condition_attr].value

    # now define proper condition sa's
    for con, con_map in glm_condition_attr_map.iteritems():
        model_params.sa[con] = [con_map[v] for v in regressor_names]
    model_params.sa.pop(glm_condition_attr)  # remove generated one
    return model_params
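
# Usage sketch: a hypothetical call that adds motion parameters, stored as
# a dataset sample attribute 'mc_params', as confound regressors; attribute
# and argument names are illustrative only.
betas = _fit_hrf_event_model(ds, events, time_attr='time_coords',
                             condition_attr='targets',
                             design_kwargs=dict(drift_model='blank'),
                             glmfit_kwargs=dict(model='ols'),
                             regr_attrs=['mc_params'])
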
Exemplo n.º 41
0
    def preprocess_files(self, func_files, anat_files=None, verbose=1):
        def get_beta_filepath(func_file, cond):
            return func_file.replace('_bold.nii.gz', '_beta-%s.nii.gz' % cond)

        beta_files = []
        for fi, func_file in enumerate(func_files):
            # Don't re-do preprocessing.
            beta_mask = func_file.replace('_bold.nii.gz', '_beta*.nii.gz')

            cond_file = func_file.replace('_bold.nii.gz', '_events.tsv')
            cond_data = pd.read_csv(cond_file, sep='\t')

            # Get condition info, to search if betas have been done.
            conditions = cond_data['trial_type'].tolist()
            all_conds = np.unique(conditions)
            all_beta_files = [
                get_beta_filepath(func_file, cond) for cond in all_conds
            ]
            # All betas are done.
            if np.all([os.path.exists(f) for f in all_beta_files]):
                beta_files += all_beta_files
                continue

            if verbose >= 0:
                print('Preprocessing file %d of %d' %
                      (fi + 1, len(func_files)))

            # Need to do regression.
            tr = cond_data['duration'].as_matrix().mean()
            onsets = cond_data['onset'].tolist()

            img = nibabel.load(func_file)
            n_scans = img.shape[3]
            frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)

            # Create the design matrix
            paradigm = EventRelatedParadigm(conditions, onsets)
            design_mat = dm.make_dmtx(frametimes,
                                      paradigm,
                                      drift_model='cosine',
                                      hfcut=n_scans,
                                      hrf_model='canonical')

            # Do the GLM
            mask_img = compute_epi_mask(img)
            fmri_glm = FMRILinearModel(img, design_mat.matrix, mask=mask_img)
            fmri_glm.fit(do_scaling=True, model='ar1')

            # Pull out the betas
            # least-squares estimates of the betas
            beta_hat = fmri_glm.glms[0].get_beta()
            mask = fmri_glm.mask.get_data() > 0

            # output beta images
            dim = design_mat.matrix.shape[1]
            beta_map = np.tile(mask.astype(np.float)[..., np.newaxis], dim)
            beta_map[mask] = beta_hat.T
            beta_image = nibabel.Nifti1Image(beta_map, fmri_glm.affine)
            beta_image.get_header()['descrip'] = (
                'Parameter estimates of the localizer dataset')

            # Save beta images
            for ci, cond in enumerate(np.unique(conditions)):
                beta_cond_img = index_img(beta_image, ci)
                beta_filepath = get_beta_filepath(func_file, cond)
                nibabel.save(beta_cond_img, beta_filepath)
                beta_files.append(beta_filepath)

        return beta_files
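
# Note (sketch): the per-condition beta lookup above, index_img(beta_image,
# ci) with ci from enumerate(np.unique(conditions)), relies on nipy placing
# condition regressors in sorted (alphabetical) order in the design matrix,
# which matches np.unique's sorted output. A standalone check of that
# assumption:
import numpy as np
from nipy.modalities.fmri import design_matrix as dm
from nipy.modalities.fmri.experimental_paradigm import EventRelatedParadigm

frametimes = np.arange(0, 100, 2.)
paradigm = EventRelatedParadigm(['b_cond', 'a_cond', 'b_cond'],
                                [10., 30., 50.])
mat = dm.make_dmtx(frametimes, paradigm, hrf_model='canonical')
assert mat.names[:2] == ['a_cond', 'b_cond']
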
Exemplo n.º 42
0
nscans = 128
frametimes = np.linspace(0, (nscans - 1) * tr, nscans)

# experimental paradigm
conditions = ['c0', 'c0', 'c0', 'c1', 'c1', 'c1', 'c3', 'c3', 'c3']
onsets = [30, 70, 100, 10, 30, 90, 30, 40, 60]
hrf_model = 'canonical'
motion = np.cumsum(np.random.randn(128, 6), 0)
add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']

# event-related design matrix
paradigm = EventRelatedParadigm(conditions, onsets)

X1 = make_dmtx(frametimes,
               paradigm,
               drift_model='polynomial',
               drift_order=3,
               add_regs=motion,
               add_reg_names=add_reg_names)

# block design matrix
duration = 7 * np.ones(9)
paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration)

X2 = make_dmtx(frametimes, paradigm, drift_model='polynomial', drift_order=3)

# FIR model
paradigm = EventRelatedParadigm(conditions, onsets)
hrf_model = 'FIR'
X3 = make_dmtx(frametimes,
               paradigm,
               hrf_model='fir',
Exemplo n.º 43
0
def glm_nipy(fmri_data, contrasts=None, hrf_model='Canonical',
             drift_model='Cosine', hfcut=128,
             residuals_model='spherical', fit_method='ols',
             fir_delays=[0],
             rescale_results=False, rescale_factor=None):
    """
    Perform a GLM analysis on fMRI data using the implementation of Nipy.

    Args:
        fmri_data (pyhrf.core.FmriData): the input fMRI data defining the
            paradigm and the measured 3D+time signal.
        contrasts (dict): keys are contrast labels and values are arithmetic
            expressions involving regressor names. Valid names are:
            * names of experimental conditions as defined in fmri_data
            * constant
        hrf_model: "Canonical", "Canonical with Derivative", "FIR"
        residuals_model: "spherical", "ar1"
        fit_method: "ols", "kalman" (If residuals_model is "ar1" then method
            is set to "kalman" and this argument is ignored)
        fir_delays: list of integers indicating the delay of each FIR coefficient
                    (in terms of scans). Eg if TR = 2s. and we want a FIR
                    duration of 20s.: fir_delays=range(10)
    Returns:
        (glm instance, design matrix, dict of contrasts of objects)

    Examples:
    >>> from pyhrf.core import FmriData
    >>> from pyhrf.glm import glm_nipy
    >>> g,dmtx,con = glm_nipy(FmriData.from_vol_ui())
    >>> g,dmtx,con = glm_nipy(FmriData.from_vol_ui(), \
                              contrasts={'A-V':'audio-video'})
    """

    paradigm = fmri_data.paradigm.to_nipy_paradigm()


    # BOLD data
    Y = fmri_data.bold.T
    n_scans = Y.shape[1]
    # pyhrf.verbose(1, 'Input BOLD: nvox=%d, nscans=%d' %Y.shape)

    # Design matrix
    frametimes = np.linspace(0, (n_scans-1)*fmri_data.tr, n_scans)
    design_matrix = dm.make_dmtx(frametimes, paradigm,
                                 hrf_model=hrf_model,
                                 drift_model=drift_model, hfcut=hfcut,
                                 fir_delays=fir_delays)

    ns, nr = design_matrix.matrix.shape
    pyhrf.verbose(2, 'Design matrix built with %d regressors:' %nr)
    for rn in design_matrix.names:
        pyhrf.verbose(2, '    - %s' %rn)

    # ax = design_matrix.show()
    # ax.set_position([.05, .25, .9, .65])
    # ax.set_title('Design matrix')
    # plt.savefig(op.join(output_dir, 'design_matrix.png'))

    # GLM fit
    my_glm = glm.glm()
    pyhrf.verbose(2, 'Fit GLM - method: %s, residual model: %s' \
                      %(fit_method,residuals_model))
    my_glm.fit(Y.T, design_matrix.matrix, method=fit_method,
               model=residuals_model)

    from pyhrf.tools import map_dict
    from pyhrf.paradigm import contrasts_to_spm_vec

    if rescale_results:

        # Rescale by the norm of the HRF:
        # from nipy.modalities.fmri.hemodynamic_models import _hrf_kernel, \
        #     sample_condition
        # oversampling = 16
        # hrfs = _hrf_kernel(hrf_model, fmri_data.tr, oversampling,
        #                    fir_delays=fir_delays)
        # hframetimes = np.linspace(0, 32., int(32./fmri_data.tr))
        # hr_regressor, hr_frametimes = sample_condition(
        #     (np.array([0]),np.array([0]),np.array([1])),
        #     hframetimes, oversampling)
        # from scipy.interpolate import interp1d
        # for i in xrange(len(hrfs)):
        #     f = interp1d(hr_frametimes, hrfs[i])
        #     hrfs[i] = f(hframetimes).T

        # n_conds = len(fmri_data.paradigm.stimOnsets)
        # for i in xrange(n_conds * len(hrfs)):
        #     h = hrfs[i%len(hrfs)]
        #     my_glm.beta[i] = my_glm.beta[i] * (h**2).sum()**.5

        #my_glm.variance = np.zeros_like(my_glm.beta)
        if rescale_factor is None:
            # Rescale by the norm of each regressor in the design matrix
            dm_reg_norms = (design_matrix.matrix ** 2).sum(0) ** .5
            pyhrf.verbose(2, 'GLM results (beta and con effects) are '
                             'rescaled by reg norm. Weights: %s '
                             % str(dm_reg_norms))

            for ib in xrange(my_glm.beta.shape[0]):
                my_glm.beta[ib] = my_glm.beta[ib] * dm_reg_norms[ib]
                # my_glm.nvbeta[ib, :] *= dm_reg_norms[ib] ** 2

        else:
            pyhrf.verbose(2, 'GLM results (beta and con effects) are '
                             'rescaled by input scale factor.')

            # Use input rescale factors:
            for ib in xrange(rescale_factor.shape[0]):
                my_glm.beta[ib] = my_glm.beta[ib] * rescale_factor[ib]
                # TOCHECK: nvbeta seems to be a covariance matrix between
                # regressors -> we don't get position-specific variances...
                # my_glm.nvbeta[ib, :] *= rescale_factor[ib] ** 2
    
    if contrasts is not None:
        con_vectors = contrasts_to_spm_vec(design_matrix.names, contrasts)
        # if rescale_results:
        #     for con_vec in con_vectors.itervalues():
        #         con_vec *= dm_reg_norms
        contrast_result = map_dict(my_glm.contrast, con_vectors)
    else:
        contrast_result = None


    return my_glm, design_matrix, contrast_result

# actually: not possible to compute PPM from glm results;
# should relaunch estimation with proper model under SPM
# def PPMcalculus_glmWN(beta, var_beta, dm, threshold_value):
Exemplo n.º 44
0
            fmri_series = [
                os.path.join(
                    fmri_dir,
                    '%s_series_%s_rh_smooth5.gii' % (subject, session))
                for session in sessions
            ]

        # compute the mask
        if side is False:
            mean_img = glob.glob(os.path.join(fmri_dir, 'mean*.nii'))[0]
            mask_array = compute_mask_files(mean_img, epi_mask, True,
                                            inf_threshold, sup_threshold)[0]

        # get the contrasts
        n_reg = make_dmtx(frametimes,
                          add_regs=reg_matrix,
                          drift_model=drift_model,
                          hfcut=hfcut).matrix.shape[1]

        contrasts, all_reg = make_contrasts(sessions, n_reg)
        contrast_obj = {}

        for (sess, (session,
                    fmri_data)) in enumerate(zip(sessions, fmri_series)):
            # create design matrices
            reg = all_reg[2 * sess:2 * sess + 2]  # fixme
            design_matrix = make_dmtx(frametimes,
                                      add_regs=reg_matrix,
                                      add_reg_names=reg,
                                      drift_model=drift_model,
                                      hfcut=hfcut)
Exemplo n.º 45
0
# design matrix
TR = 7
N_SCANS = 84

conditions = ['listen'] * 7
duration = TR * 6 * np.ones(7)

onset = np.arange(6, N_SCANS, 12) * TR
paradigm = BlockParadigm(con_id=conditions,
                         onset=onset,
                         duration=duration)
frametimes = np.linspace(0, (N_SCANS - 1) * TR, N_SCANS)

design_mat = design_matrix.make_dmtx(frametimes, paradigm,
                                     hrf_model='Canonical',
                                     drift_model='Cosine',
                                     hfcut=168)
design_mat.show()


# general linear model
glm = FMRILinearModel(input_image, design_mat.matrix, mask='compute')
glm.fit()



# contrasts
c = np.hstack([1, np.zeros(len(design_mat.names)-1)])

z_map, t_map, eff_map, var_map = glm.contrast(c,
                                              contrast_type='t',
Exemplo n.º 46
0
def _make_design_matrix(run_frame, hrf_model='canonical', drift_model='cosine', orthogonalize=None):
    # print ' %s' % ' '.join(run_frame[['study', 'subject', 'model', 'task', 'run']].values[0])
    bold_file = run_frame.preproc_bold.unique()[0]
    n_scans = nb.load(bold_file).shape[-1]
    tr = float(run_frame.TR.unique()[0])
    frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    movement_regressors = run_frame.movement.unique()[0]
    movement_regressors = np.recfromtxt(movement_regressors) \
        if isinstance(movement_regressors, basestring) else None
    names = []
    times = []
    durations = []
    amplitudes = []
    for condition_id, condition_name, condition_file in run_frame[
            ['condition', 'condition_name', 'condition_file']].values:
        conditions = _csv_to_dict(condition_file)
        if condition_id == 'empty_evs':
            conditions = sorted(conditions.keys())
            for c in conditions:
                names.append(['cond%03i' % int(c)])
                times.append([0])
                durations.append([0])
                amplitudes.append([0])
        else:
            keys = sorted(conditions.keys())
            times.append(np.array(keys).astype('float'))
            names.append(['%s_%s' % (condition_id, condition_name)] * len(keys))
            durations.append([float(conditions[k][0]) for k in keys])
            amplitudes.append([float(conditions[k][1]) for k in keys])

    times = np.concatenate(times).ravel()
    order = np.argsort(times)
    times = times[order]
    names = np.concatenate(names)[order]
    durations = np.concatenate(durations)[order]
    amplitudes = np.concatenate(amplitudes)[order]

    if durations.sum() == 0:
        paradigm = EventRelatedParadigm(names, times, amplitudes)
    else:
        paradigm = BlockParadigm(names, times, durations, amplitudes)

    if movement_regressors is None:
        design_matrix = make_dmtx(
            frametimes, paradigm, hrf_model=hrf_model,
            drift_model=drift_model)
    else:
        mov_reg_names = ['movement_%i' % r
                         for r in range(movement_regressors.shape[1])]
        design_matrix = make_dmtx(
            frametimes, paradigm, hrf_model=hrf_model,
            drift_model=drift_model,
            add_regs=movement_regressors, add_reg_names=mov_reg_names)

    # orthogonalize regressors
    if orthogonalize is not None and 'derivative' not in hrf_model:
        task_id = run_frame.task.unique()[0]

        for x, y in orthogonalize[orthogonalize.index == task_id].values:
            x_ = design_matrix.matrix[:, x]
            y_ = design_matrix.matrix[:, y]
            z = _orthogonalize_vectors(x_, y_)
            design_matrix.matrix[:, x] = z

    return bold_file, pd.DataFrame(dict(zip(design_matrix.names, design_matrix.matrix.T)))
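
# Sketch of the helper _orthogonalize_vectors used above, which is not
# shown in this example. A plausible implementation residualizes x with
# respect to y (one Gram-Schmidt step); the actual project code may differ.
import numpy as np

def _orthogonalize_vectors(x, y):
    # remove from x its projection onto y; the result is orthogonal to y
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    return x - (np.dot(y, x) / np.dot(y, y)) * y
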
Exemplo n.º 47
0
                              struct_as_record=False)

    faces_onsets = timing['onsets'][0].ravel()
    scrambled_onsets = timing['onsets'][1].ravel()
    onsets = np.hstack((faces_onsets, scrambled_onsets))
    onsets *= tr  # because onsets were reported in 'scans' units
    conditions = (['faces'] * len(faces_onsets) +
                  ['scrambled'] * len(scrambled_onsets))
    paradigm = EventRelatedParadigm(conditions, onsets)

    # build design matrix
    frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    design_matrix = make_dmtx(
        frametimes,
        paradigm,
        hrf_model=hrf_model,
        drift_model=drift_model,
        hfcut=hfcut,
        add_reg_names=['tx', 'ty', 'tz', 'rx', 'ry', 'rz'],
        add_regs=np.loadtxt(subject_data.realignment_parameters[x]))

    design_matrices.append(design_matrix)

# specify contrasts
contrasts = {}
n_columns = len(design_matrix.names)
for i in xrange(paradigm.n_conditions):
    contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

# more interesting contrasts
contrasts['faces-scrambled'] = contrasts['faces'] - contrasts['scrambled']
contrasts['scrambled-faces'] = -contrasts['faces-scrambled']
Exemplo n.º 48
0
tr = 1.0
nscans = 128
frametimes = np.linspace(0, (nscans - 1) * tr, nscans)

# experimental paradigm
conditions = ['c0', 'c0', 'c0', 'c1', 'c1', 'c1', 'c3', 'c3', 'c3']
onsets = [30, 70, 100, 10, 30, 90, 30, 40, 60]
hrf_model = 'canonical'
motion = np.cumsum(np.random.randn(128, 6), 0)
add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']

# event-related design matrix
paradigm = EventRelatedParadigm(conditions, onsets)

X1 = make_dmtx(
    frametimes, paradigm, drift_model='polynomial', drift_order=3,
    add_regs=motion, add_reg_names=add_reg_names)

# block design matrix
duration = 7 * np.ones(9)
paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                         duration=duration)

X2 = make_dmtx(frametimes, paradigm, drift_model='polynomial',
               drift_order=3)

# FIR model
paradigm = EventRelatedParadigm(conditions, onsets)
hrf_model = 'FIR'
X3 = make_dmtx(frametimes, paradigm, hrf_model='fir',
               drift_model='polynomial', drift_order=3,
Exemplo n.º 49
0
def do_glm_for_subject(subject_id, bold_base_folder, trial_base_folder,
                       output_base_folder):

    subject_dir = path(bold_base_folder) / ("sub%03d" % subject_id)
    output_dir = (path(output_base_folder) / ("sub%03d" % subject_id) /
                  "model001")
    print output_dir
    if not output_dir.exists():
        output_dir.makedirs()


    anat_file = subject_dir / "highres001.nii"
    anat = nb.load(anat_file)
    run_ids = range(1, 10)

    task_bold_files = [subject_dir.glob("task001_run%03d/rbold*.nii"
                                        % rid)[0]
                       for rid in run_ids]
    task_mvt_files = [subject_dir.glob("task001_run%03d/rp_bold*.txt" % 
                                       rid)[0]
                      for rid in run_ids]

    trial_files = [(path(trial_base_folder) / ("Sub%02d" % subject_id) /
                   "BOLD" / "Trials" / ("run_%02d_spmdef.txt" % rid))
                   for rid in range(1, 10)]
    stats_start_time = pretty_time()
    paradigms = []
    design_matrices = []
    n_scans = []
    all_frametimes = []
    list_of_contrast_dicts = []  # one dict per run

    for bold_file, mvt_file, trial_file in zip(task_bold_files, 
                                               task_mvt_files,
                                               trial_files):

        _n_scans = nb.load(bold_file).shape[-1]
        n_scans.append(_n_scans)
        paradigm = make_paradigm(trial_file)
        paradigms.append(paradigm)
        movements = np.loadtxt(mvt_file)

        tr = 2.
        drift_model = "Cosine"
        hrf_model = "Canonical With Derivative"
        hfcut = 128.

        frametimes = np.linspace(0, (_n_scans - 1) * tr, _n_scans)

        design_matrix = make_dmtx(
            frametimes,
            paradigm,
            hrf_model=hrf_model,
            drift_model=drift_model,
            hfcut=hfcut,
            add_regs=movements,
            add_reg_names=[
                "Tx", "Ty", "Tz", "R1", "R2", "R3"])

        design_matrices.append(design_matrix)
        all_frametimes.append(frametimes)

        # specify contrasts
        contrasts = {}
        n_columns = len(design_matrix.names)
        for i in xrange(paradigm.n_conditions):
            contrasts['%s' % design_matrix.names[2 * i]] = np.eye(
                n_columns)[2 * i]

        # more interesting contrasts
        contrasts['Famous-Unfamiliar'] = contrasts[
            'Famous'] - contrasts['Unfamiliar']
        contrasts['Unfamiliar-Famous'] = -contrasts['Famous-Unfamiliar']
        contrasts['Famous-Scrambled'] = contrasts[
            'Famous'] - contrasts['Scrambled']
        contrasts['Scrambled-Famous'] = -contrasts['Famous-Scrambled']
        contrasts['Unfamiliar-Scrambled'] = contrasts[
            'Unfamiliar'] - contrasts['Scrambled']
        contrasts['Scrambled-Unfamiliar'] = -contrasts['Unfamiliar-Scrambled']

        list_of_contrast_dicts.append(contrasts)


    # important maps
    z_maps = {}
    effects_maps = {}
    variance_maps = {}

    fmri_glm = FMRILinearModel(task_bold_files,
                               [dm.matrix for dm in design_matrices],
                               mask="compute")
    fmri_glm.fit(do_scaling=True, model="ar1")

    # replicate contrasts across runs
    contrasts = dict((cid, [contrasts[cid]
                            for contrasts in list_of_contrast_dicts])
                     for cid, cval in contrasts.iteritems())

    # compute effects
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, eff_map, var_map = fmri_glm.contrast(
            contrast_val,
            con_id=contrast_id,
            output_z=True,
            output_stat=False,
            output_effects=True,
            output_variance=True
            )

        for map_type, out_map in zip(['z', 'effects', 'variance'], 
                                     [z_map, eff_map, var_map]):
            map_dir = output_dir / ('%s_maps' % map_type)
            if not map_dir.exists():
                map_dir.makedirs()
            map_path = map_dir / ('%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nb.save(out_map, map_path)

            # collect the maps for the contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            elif map_type == 'effects':
                effects_maps[contrast_id] = map_path
            elif map_type == 'variance':
                variance_maps[contrast_id] = map_path

    stats_report_dir = output_dir / "report"
    if not stats_report_dir.exists():
        stats_report_dir.makedirs()

    stats_report_filename = stats_report_dir / "report_stats.html"
    # remove repeated contrasts
    contrasts = dict((cid, cval[0]) for cid, cval in contrasts.iteritems())

    slicer = 'z'
    cut_coords = [-20, -10, 0, 10, 20, 30, 40, 50]
    threshold = 3.
    cluster_th = 15


    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        anat_affine=anat.get_affine(),
        anat=anat.get_data(),
        threshold=threshold,
        cluster_th=cluster_th,
        slicer=slicer,
        cut_coords=cut_coords,
        design_matrices=design_matrices,
        subject_id="sub%03d" % subject_id,
        start_time=stats_start_time,
        title="GLM for subject %s" % ("sub%03d" % subject_id),

        # additional ``kwargs`` for more informative report
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        drift_model=drift_model,
        hrf_model=hrf_model,
        paradigm=dict(("Run_%02i" % (run_id), paradigms[run_id - 1])
                      for run_id in run_ids),
        frametimes=dict(("Run_%02i" % (run_id), all_frametimes[run_id - 1])
                        for run_id in run_ids),
        # fwhm=fwhm
        )

    return fmri_glm
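
The contrast bookkeeping above deserves a note: each run produces its own
contrast dict, and the per-run vectors are then regrouped by contrast id so
that fmri_glm.contrast() receives one vector per session. A minimal,
self-contained sketch of that regrouping (editor's illustration; the toy
dicts below are made up):

per_run = [{'Famous': [1, 0], 'Scrambled': [0, 1]},
           {'Famous': [1, 0], 'Scrambled': [0, 1]}]
regrouped = dict((cid, [run[cid] for run in per_run])
                 for cid in per_run[0])
# regrouped['Famous'] == [[1, 0], [1, 0]]  -- one vector per run
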
Example no. 50
0
        if side is False:
            # in the volume, compute a mask of the brain
            mask_array = compute_mask_files(fmri_series[0], epi_mask, True,
                                            inf_threshold, sup_threshold)[0]

        # Specify the contrasts
        contrasts = make_contrasts(sessions, odd=subject in
                                   ['sujet10', 'sujet12'])

        contrast_obj = {}
        for (sess, (session, fmri_data)) in enumerate(zip(
            sessions, fmri_series)):
            # create design matrices
            reg = all_reg[4 * sess: 4 * sess + 4] # fixme
            design_matrix = make_dmtx(
                frametimes, add_regs=reg_matrix, add_reg_names=reg,
                drift_model=drift_model, hfcut=hf_cut)

            # plot the design matrix
            ax = design_matrix.show()
            ax.set_position([.05, .25, .9, .65])
            ax.set_title('Design matrix')
            pylab.savefig(os.path.join(write_dir,
                                       'design_matrix_%s.png' % session))
            # get the data
            if side is False:
                Y, _ = data_scaling(np.array([load(f).get_data()[mask_array]
                                              for f in fmri_data]))
                affine = load(fmri_data[0]).get_affine()
            else:
                Y, _ = data_scaling(np.array(
Example no. 51
0
_duration = 6
epoch_duration = _duration * tr
conditions = ['rest', 'active'] * 8
duration = epoch_duration * np.ones(len(conditions))
onset = np.linspace(0, (len(conditions) - 1) * epoch_duration,
                    len(conditions))
paradigm = BlockParadigm(con_id=conditions, onset=onset, duration=duration)
hfcut = 2 * 2 * epoch_duration
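# the cutoff period spans two full rest+active cycles (2 * epoch_duration
# per cycle), keeping the cosine drift terms below the task frequency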

# construct design matrix
nscans = len(subject_data.func[0])
frametimes = np.linspace(0, (nscans - 1) * tr, nscans)
drift_model = 'Cosine'
hrf_model = 'Canonical With Derivative'
design_matrix = make_dmtx(frametimes,
                          paradigm, hrf_model=hrf_model,
                          drift_model=drift_model, hfcut=hfcut)

# plot and save design matrix
ax = design_matrix.show()
ax.set_position([.05, .25, .9, .65])
ax.set_title('Design matrix')
dmat_outfile = os.path.join(subject_data.output_dir, 'design_matrix.png')
pl.savefig(dmat_outfile, bbox_inches="tight", dpi=200)

# specify contrasts
contrasts = {}
n_columns = len(design_matrix.names)
for i in xrange(paradigm.n_conditions):
    contrasts[design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]
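
A quick sanity check on the contrast construction above: print which design
column each contrast vector selects; with the derivative model every
condition should land on an even index (the canonical regressor, not its
temporal derivative). A small usage sketch, assuming the snippet above has
just run:

for name in sorted(contrasts):
    print name, np.flatnonzero(contrasts[name])
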
Example no. 52
0
def runsub(sub,
           thisContrast,
           thisContrastStr,
           filterLen,
           filterOrd,
           paramEst,
           chunklen,
           alphas=np.logspace(0, 3, 20),
           debug=False,
           write=False,
           roi='grayMatter'):
    thisSub = {sub: subList[sub]}
    mc_params = lmvpa.loadmotionparams(paths, thisSub)
    beta_events = lmvpa.loadevents(paths, thisSub)
    dsdict = lmvpa.loadsubdata(paths, thisSub, m=roi, c='trial_type')
    thisDS = dsdict[sub]

    # savitsky golay filtering
    sg.sg_filter(thisDS, filterLen, filterOrd)
    # gallant group zscores before regression.

    # zscore w.r.t. rest trials
    # zscore(thisDS, param_est=('targets', ['rest']), chunks_attr='chunks')
    # zscore the entire set; if done chunk-wise there is no double-dipping
    # (since we leave one chunk out at a time)
    zscore(thisDS, chunks_attr='chunks')

    # Kay method: leave out a model run, use it to fit an HRF for each voxel
    # Huth method: essentially use FIR
    # Mumford method: deconvolution with canonical HRF

    # refit events and regress...
    # get timing data from timing files
    # rds, events = lmvpa.amendtimings(thisDS.copy(), beta_events[sub])
    rds, events = lmvpa.amendtimings(thisDS.copy(), beta_events[sub],
                                     contrasts)  # adding features

    # we can model out motion and just not use those betas.
    # Ridge
    if isinstance(thisContrast, basestring):
        thisContrast = [thisContrast]
    # instead of binarizing each one, make them parametric
    desX, rds = lmvpa.make_designmat(rds,
                                     events,
                                     time_attr='time_coords',
                                     condition_attr=thisContrast,
                                     design_kwargs={
                                         'hrf_model': 'canonical',
                                         'drift_model': 'blank'
                                     },
                                     regr_attrs=None)
    # want to collapse ap and cr, but have anim separate
    desX['motion'] = make_dmtx(rds.sa['time_coords'].value,
                               paradigm=None,
                               add_regs=mc_params[sub],
                               drift_model='blank')

    des = lmvpa.make_parammat(desX, hrf='canonical', zscore=True)

    # set chunklen and nchunks
    # split by language and pictures
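    # (median split of the sorted chunk labels: the first half of the runs
    #  are the language sessions, the second half the picture sessions)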
    half = len(thisDS.sa['chunks'].unique) // 2
    lidx = thisDS.chunks < thisDS.sa['chunks'].unique[half]
    pidx = thisDS.chunks >= thisDS.sa['chunks'].unique[half]
    ldes = cp.copy(des)
    pdes = cp.copy(des)

    ldes.matrix = ldes.matrix[lidx]
    pdes.matrix = pdes.matrix[pidx]
    nchunks = int(len(thisDS) * paramEst / chunklen)
    nboots = 50
    covarmat = None
    mus = None
    lwts, lalphas, lres, lceil = bsr.bootstrap_ridge(rds[lidx],
                                                     ldes,
                                                     chunklen=chunklen,
                                                     nchunks=nchunks,
                                                     cov0=covarmat,
                                                     mu0=mus,
                                                     part_attr='chunks',
                                                     mode='test',
                                                     alphas=alphas,
                                                     single_alpha=True,
                                                     normalpha=False,
                                                     nboots=nboots,
                                                     corrmin=.2,
                                                     singcutoff=1e-10,
                                                     joined=None,
                                                     plot=debug,
                                                     use_corr=True)

    pwts, palphas, pres, pceil = bsr.bootstrap_ridge(rds[pidx],
                                                     pdes,
                                                     chunklen=chunklen,
                                                     nchunks=nchunks,
                                                     part_attr='chunks',
                                                     mode='test',
                                                     alphas=alphas,
                                                     single_alpha=True,
                                                     normalpha=False,
                                                     nboots=nboots,
                                                     corrmin=.2,
                                                     singcutoff=1e-10,
                                                     joined=None,
                                                     plot=debug,
                                                     use_corr=True)
    print 'language: ' + str(np.mean(lres))

    # pictures within
    print 'pictures: ' + str(np.mean(pres))

    # need to change outstring
    if write:
        from mvpa2.base import dataset
        outbase = os.path.join(paths[0], 'Maps', 'Encoding',
                               sub + '_' + roi + '_' + thisContrastStr)
        map2nifti(thisDS, dataset.vstack([lres, pres])).to_filename(
            outbase + '_ridge_corrs.nii.gz')
        map2nifti(thisDS, dataset.vstack([lwts, pwts])).to_filename(
            outbase + '_ridge_weights.nii.gz')
        map2nifti(thisDS, dataset.vstack([lalphas, palphas])).to_filename(
            outbase + '_ridge_alphas.nii.gz')
        map2nifti(thisDS, dataset.vstack([lceil, pceil])).to_filename(
            outbase + '_ridge_ceiling.nii.gz')

    del lres, pres, lwts, pwts, lalphas, palphas, lceil, pceil
    crossSet = thisDS.copy()
    crossSet.chunks[lidx] = 1
    crossSet.chunks[pidx] = 2
    cwts, calphas, cres, cceil = bsr.bootstrap_ridge(crossSet,
                                                     des,
                                                     chunklen=chunklen,
                                                     nchunks=nchunks,
                                                     part_attr='chunks',
                                                     mode='test',
                                                     alphas=alphas,
                                                     single_alpha=True,
                                                     normalpha=False,
                                                     nboots=nboots,
                                                     corrmin=.2,
                                                     singcutoff=1e-10,
                                                     joined=None,
                                                     use_corr=True)
    print 'cross: ' + str(np.mean(cres))
    if write:
        outbase = os.path.join(paths[0], 'Maps', 'Encoding',
                               sub + '_' + roi + '_' + thisContrastStr)
        map2nifti(thisDS, cres[0]).to_filename(
            outbase + '_P2L_ridge_corr.nii.gz')
        map2nifti(thisDS, cres[1]).to_filename(
            outbase + '_L2P_ridge_corr.nii.gz')
        map2nifti(thisDS, cwts[0]).to_filename(
            outbase + '_P2L_ridge_weights.nii.gz')
        map2nifti(thisDS, cwts[1]).to_filename(
            outbase + '_L2P_ridge_weights.nii.gz')
        map2nifti(thisDS, calphas[calphas.chunks == 1]).to_filename(
            outbase + '_P2L_ridge_alphas.nii.gz')
        map2nifti(thisDS, calphas[calphas.chunks == 2]).to_filename(
            outbase + '_L2P_ridge_alphas.nii.gz')
        map2nifti(thisDS, cceil[0]).to_filename(
            outbase + '_P2L_ridge_ceiling.nii.gz')
        map2nifti(thisDS, cceil[1]).to_filename(
            outbase + '_L2P_ridge_ceiling.nii.gz')
    del cres, cwts, calphas, cceil
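
The language/pictures split and the cross-set relabeling are the crux of
this example: within-modality ridge models are fit on each half, then the
chunks are collapsed into two partitions so that each modality predicts the
other. A numpy-only sketch of that relabeling (editor's illustration with
made-up chunk labels):

import numpy as np

chunks = np.array([0, 0, 1, 1, 2, 2, 3, 3])  # four runs, two per modality
unique = np.unique(chunks)
lidx = chunks < unique[len(unique) // 2]     # first half: language runs
cross = np.where(lidx, 1, 2)                 # two folds for cross-decoding
# cross -> [1, 1, 1, 1, 2, 2, 2, 2]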