# NOTE(review): scrape fragment — this chunk starts mid-function (hence the
# inconsistent indentation) and uses Python 2 ``print`` statements; it will
# not run as-is under Python 3.  Comments describe only what is visible.
# Build an HRF estimate as a difference of two gamma functions, then wrap it
# in an interpolator over the grid x_0 for evaluation at arbitrary times.
hrf_est = _gamma_difference_hrf(1., oversampling=1./dt, time_length=hrf_length + dt,
                                          onset=0., delay=hrf_peak, undershoot=hrf_ushoot,
                                          dispersion=1., u_dispersion=1., ratio=0.167)
            f_hrf_est = interp1d(x_0, hrf_est)

            # Simulate a jittered event-related design convolved with the
            # estimated HRF (f_hrf=f_hrf_est); only the design matrix is kept.
            _, design, _, _ = generate_spikes_time_series(
                n_events=n_events, n_blank_events=n_blank_events,
                event_spacing=event_spacing, t_r=t_r, event_types=event_types,
                return_jitter=True, jitter_min=jitter_min, jitter_max=jitter_max,
                period_cut=period_cut, drift_order=drift_order, time_offset=10,
                modulation=None, seed=seed, f_hrf=f_hrf_est, hrf_length=hrf_length)

            # Testing with a GLM
            glm = FirstLevelGLM(mask=mask_img, t_r=t_r, standardize=True,
                                noise_model='ols')
            glm.fit(niimgs, design)
            #print 'n_timepoints, n_voxels: ', glm.results_[0][0].norm_resid.shape
            #print glm.results_[0][0].resid
            #print glm.results_[0][0].logL
            # Voxel-wise SNR: norm of the (noise-free) signal along the time
            # axis divided by the noise sigma; snr_db is the same in decibels.
            snr = np.linalg.norm(fmri, axis=3) / sigma_noise
            snr_db = 20 * (np.log10(np.linalg.norm(fmri, axis=3) / sigma_noise))
            print 'sigma_noise = ', sigma_noise
            print 'SNR = ', snr.mean()
            print 'SNR = ', snr_db.mean(), ' dB'

            print glm.results_[0][0].norm_resid.mean()
            # Mean squared residual norm over voxels, stored for this
            # (simulation, estimate) cell of the results table.
            norm_resid[isim, iest] = (np.linalg.norm(glm.results_[0][0].resid, axis=0)**2).mean()


    # Persist a figure per noise level; create the output folder lazily.
    if not op.exists(fig_folder): os.makedirs(fig_folder)
    fig_name = op.join(fig_folder, 'glm_residual_norm_sigma' + str(sigma_noise))
# Output directory for results, created on first run next to the cwd.
write_dir = path.join(getcwd(), 'results')
if not path.exists(write_dir):
    mkdir(write_dir)

# Data and analysis parameters
# Fetch the FIAC first-level dataset: two functional runs plus one
# precomputed design matrix per run (stored as the 'X' array of an .npz).
data = datasets.fetch_fiac_first_level()
fmri_img = [data['func1'], data['func2']]
mean_img_ = mean_img(fmri_img[0])
design_files = [data['design_matrix1'], data['design_matrix2']]
design_matrices = [pd.DataFrame(np.load(df)['X']) for df in design_files]

# GLM specification
fmri_glm = FirstLevelGLM(data['mask'], standardize=False, noise_model='ar1')

# GLM fitting
fmri_glm.fit(fmri_img, design_matrices)

# compute fixed effects of the two runs and compute related images
n_columns = design_matrices[0].shape[1]
def pad_vector(contrast_, n_columns):
    """Right-pad *contrast_* with zeros so the result has *n_columns* entries."""
    padding = np.zeros(n_columns - len(contrast_))
    return np.hstack((contrast_, padding))

contrasts = {'SStSSp_minus_DStDSp': pad_vector([1, 0, 0, -1], n_columns),
            'DStDSp_minus_SStSSp': pad_vector([-1, 0, 0, 1], n_columns),
            'DSt_minus_SSt': pad_vector([-1, -1, 1, 1], n_columns),
            'DSp_minus_SSp': pad_vector([-1, 1, -1, 1], n_columns),
            'DSt_minus_SSt_for_DSp': pad_vector([0, -1, 0, 1], n_columns),
            'DSp_minus_SSp_for_DSt': pad_vector([0, 0, -1, 1], n_columns),
            'Deactivation': pad_vector([-1, -1, -1, -1, 4], n_columns),
            'Effects_of_interest': np.eye(n_columns)[:5]}
                          optimize=optimize,
                          n_restarts_optimizer=n_restarts_optimizer,
                          zeros_extremes=zeros_extremes, f_mean=f_hrf)

        # NOTE(review): scrape fragment — the constructor above is cut off
        # mid-call and the chunk uses Python 2 ``print`` statements.
        # Fit the GP HRF model; returns the HRF sample points (hx, hy), its
        # pointwise variance, and residual statistics.
        (hx, hy, hrf_var, resid_norm_sq, sigma_sq_resid) = gp.fit(ys, paradigm)

        print 'residual norm square = ', resid_norm_sq

        # Testing with a GLM
        # Tile the 1-D signal ys into a tiny 2x2x2 image so it can be fed to
        # the niimg-based GLM machinery; every voxel carries the same series.
        mask_img = nb.Nifti1Image(np.ones((2, 2, 2)), affine=np.eye(4))
        masker = NiftiMasker(mask_img=mask_img)
        masker.fit()
        ys2 = np.ones((2, 2, 2, ys.shape[0])) * ys[np.newaxis, np.newaxis, np.newaxis, :]
        niimgs = nb.Nifti1Image(ys2, affine=np.eye(4))
        glm = FirstLevelGLM(mask=mask_img, t_r=t_r, standardize=True, noise_model='ols')
        glm.fit(niimgs, dm)
        # Mean squared residual norm across (identical) voxels.
        norm_resid = (np.linalg.norm(glm.results_[0][0].resid, axis=0)**2).mean()
        ys_pred_glm = glm.results_[0][0].predicted[:, 0]

        # Predict GP
        # XXX: Do we need to predict for GLM???
        ys_pred, matrix, betas, resid = gp.predict(ys, paradigm)

        # Compare GP and GLM predictions against the observed signal via
        # Pearson correlation.
        corr_gp = np.corrcoef(ys_pred, ys)[1, 0]
        corr_glm = np.corrcoef(ys_pred_glm, ys)[1, 0]

        print "corr glm: %s, corr gp: %s" % (corr_glm, corr_gp)

        # Collect results for serialization (dict continues past this chunk).
        data = {}
        data['ys'] = ys
        data['study'] = study
# --- Esempio n. 4 (scrape artifact: separator between pasted examples) ---
# 0
# write directory
# Output directory for results, created on first run next to the cwd.
write_dir = path.join(getcwd(), 'results')
if not path.exists(write_dir):
    mkdir(write_dir)

# Data and analysis parameters
# Fetch the FIAC first-level dataset: two runs and their design matrices.
data = datasets.fetch_fiac_first_level()
fmri_files = [data['func1'], data['func2']]
design_files = [data['design_matrix1'], data['design_matrix2']]

# Load all the data into a common GLM
# Both sessions are fitted jointly with an AR(1) noise model.
multi_session_model = FirstLevelGLM(data['mask'], standardize=False,
                                    noise_model='ar1')

# GLM fitting
multi_session_model.fit(fmri_files, design_files)
def make_fiac_contrasts(n_columns):
    """ Specify some contrasts for the FIAC experiment

    Parameters
    ----------
    n_columns : int
        Total number of columns in the design matrix; condition vectors
        are zero-padded to this width.

    Returns
    -------
    dict
        Mapping from contrast name to a 1-D contrast vector (or a 2-D
        matrix for 'Effects_of_interest').
    """
    contrast = {}
    # the design matrices of both runs comprise 13 columns
    # the first 5 columns of the design matrices correspond to the following
    # conditions: ['SSt-SSp', 'SSt-DSp', 'DSt-SSp', 'DSt-DSp', 'FirstSt']

    def _pad_vector(contrast_, n_columns):
        # Right-pad the condition weights with zeros (drift/confound columns).
        return np.hstack((contrast_, np.zeros(n_columns - len(contrast_))))

    contrast['SStSSp_minus_DStDSp'] = _pad_vector([1, 0, 0, -1], n_columns)
    contrast['DStDSp_minus_SStSSp'] = - contrast['SStSSp_minus_DStDSp']
    contrast['DSt_minus_SSt'] = _pad_vector([- 1, - 1, 1, 1], n_columns)
    contrast['DSp_minus_SSp'] = _pad_vector([- 1, 1, - 1, 1], n_columns)
    # BUG FIX: the original (truncated) version fell through and returned
    # None.  Restore the remaining standard FIAC contrasts — they mirror the
    # identical `contrasts` dict defined elsewhere in this file — and return
    # the accumulated dict.
    contrast['DSt_minus_SSt_for_DSp'] = _pad_vector([0, -1, 0, 1], n_columns)
    contrast['DSp_minus_SSp_for_DSt'] = _pad_vector([0, 0, -1, 1], n_columns)
    contrast['Deactivation'] = _pad_vector([-1, -1, -1, -1, 4], n_columns)
    # F-contrast over the five condition columns.
    contrast['Effects_of_interest'] = np.eye(n_columns)[:5]
    return contrast
# --- Esempio n. 5 (scrape artifact: separator between pasted examples) ---
# 0
# Output directory for results, created on first run next to the cwd.
write_dir = path.join(getcwd(), 'results')
if not path.exists(write_dir):
    mkdir(write_dir)

# Data and analysis parameters
# Fetch the FIAC first-level dataset: two functional runs plus one
# precomputed design matrix per run (stored as the 'X' array of an .npz).
data = datasets.fetch_fiac_first_level()
fmri_img = [data['func1'], data['func2']]
mean_img_ = mean_img(fmri_img[0])
design_files = [data['design_matrix1'], data['design_matrix2']]
design_matrices = [pd.DataFrame(np.load(df)['X']) for df in design_files]

# GLM specification
fmri_glm = FirstLevelGLM(data['mask'], standardize=False, noise_model='ar1')

# GLM fitting
fmri_glm.fit(fmri_img, design_matrices)

# compute fixed effects of the two runs and compute related images
n_columns = design_matrices[0].shape[1]
def pad_vector(contrast_, n_columns):
    """Extend *contrast_* with trailing zeros up to *n_columns* entries."""
    n_missing = n_columns - len(contrast_)
    zeros_tail = np.zeros(n_missing)
    return np.hstack((contrast_, zeros_tail))


# Contrasts of interest, each zero-padded to the design-matrix width.
# NOTE(review): this dict literal is truncated by the scrape — no closing
# brace is visible; the remaining entries lie outside this chunk.
contrasts = {
    'SStSSp_minus_DStDSp': pad_vector([1, 0, 0, -1], n_columns),
    'DStDSp_minus_SStSSp': pad_vector([-1, 0, 0, 1], n_columns),
    'DSt_minus_SSt': pad_vector([-1, -1, 1, 1], n_columns),
    'DSp_minus_SSp': pad_vector([-1, 1, -1, 1], n_columns),
    'DSt_minus_SSt_for_DSp': pad_vector([0, -1, 0, 1], n_columns),