Example #1
    def __init__(self, data, design_matrix, mask=None, formula=None, model=def_model, method=None, niter=def_niter):

        # Convert input data and design into sequences
        if not hasattr(data, "__iter__"):
            data = [data]
        if not hasattr(design_matrix, "__iter__"):
            design_matrix = [design_matrix]

        # configure spatial properties
        # the 'sampling' direction is assumed to be the last
        # TODO: check that all input images have the same shape and
        # that it's consistent with the mask
        nomask = mask is None
        if nomask:
            self.xyz = None
            self.axis = len(data[0].get_shape()) - 1
        else:
            self.xyz = np.where(mask.get_data() > 0)
            self.axis = 1

        self.spatial_shape = data[0].get_shape()[0:-1]
        self.affine = data[0].get_affine()

        self.glm = []
        for i in range(len(data)):
            if not isinstance(design_matrix[i], np.ndarray):
                raise ValueError("Invalid design matrix")
            if nomask:
                Y = data[i].get_data()
            else:
                Y = data[i].get_data()[self.xyz]
            X = design_matrix[i]

            self.glm.append(glm(Y, X, axis=self.axis, formula=formula, model=model, method=method, niter=niter))
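
For orientation, the masking above is plain NumPy fancy indexing: np.where on the boolean mask yields a tuple of index arrays, and indexing the 4D data with it collapses space into a single voxel axis, which is why axis becomes 1 in the masked case. A minimal, self-contained sketch (illustrative NumPy only, not part of the original class):

import numpy as np

# hypothetical 4D block (x, y, z, time) and a boolean mask over space
data = np.random.randn(4, 5, 6, 10)
mask = np.zeros((4, 5, 6), dtype=bool)
mask[1:3, 2:4, :] = True

xyz = np.where(mask)   # tuple of index arrays, analogous to self.xyz above
Y = data[xyz]          # shape (n_voxels, n_scans) = (24, 10); time is now axis 1
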
Example #2
    def _run_interface(self, runtime):

        beta_nii = nb.load(self.inputs.beta)
        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(beta_nii.shape[:3]) == 1

        # Rebuild a glm object from the fitted parameters passed as inputs
        glm = GLM.glm()
        glm.beta = beta_nii.get_data().copy()[mask, :].T
        glm.nvbeta = self.inputs.nvbeta
        glm.s2 = nb.load(self.inputs.s2).get_data().copy()[mask]
        glm.dof = self.inputs.dof
        glm._axis = self.inputs.axis
        glm._constants = self.inputs.constants

        reg_names = self.inputs.reg_names

        self._stat_maps = []
        self._p_maps = []
        self._z_maps = []
        for contrast_def in self.inputs.contrasts:
            name = contrast_def[0]
            contrast_type = contrast_def[1]
            contrast = np.zeros(len(reg_names))

            # Map the named conditions of the contrast onto the regressor order
            for i, reg_name in enumerate(reg_names):
                if reg_name in contrast_def[2]:
                    idx = contrast_def[2].index(reg_name)
                    contrast[i] = contrast_def[3][idx]

            est_contrast = glm.contrast(contrast)

            stat_map = np.zeros(mask.shape)
            stat_map[mask] = est_contrast.stat().T
            stat_map_file = os.path.abspath(name + "_stat_map.nii")
            nb.save(nb.Nifti1Image(stat_map, beta_nii.get_affine()), stat_map_file)
            self._stat_maps.append(stat_map_file)

            p_map = np.zeros(mask.shape)
            p_map[mask] = est_contrast.pvalue().T
            p_map_file = os.path.abspath(name + "_p_map.nii")
            nb.save(nb.Nifti1Image(p_map, beta_nii.get_affine()), p_map_file)
            self._p_maps.append(p_map_file)

            z_map = np.zeros(mask.shape)
            z_map[mask] = est_contrast.zscore().T
            z_map_file = os.path.abspath(name + "_z_map.nii")
            nb.save(nb.Nifti1Image(z_map, beta_nii.get_affine()), z_map_file)
            self._z_maps.append(z_map_file)

        return runtime
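
The contrast definitions consumed above appear to follow the nipype convention of (name, type, [condition names], [weights]); the inner loop simply re-expresses the named weights in the order of reg_names. A standalone illustration with hypothetical names (not taken from the original):

import numpy as np

reg_names = ['task', 'rest', 'drift_1']
contrast_def = ('task>rest', 'T', ['task', 'rest'], [1, -1])

contrast = np.zeros(len(reg_names))
for i, reg_name in enumerate(reg_names):
    if reg_name in contrast_def[2]:
        contrast[i] = contrast_def[3][contrast_def[2].index(reg_name)]
# contrast is now array([ 1., -1.,  0.])
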
Example #3
def GLMFit(file, designMatrix,  output_glm, outputCon,
           fit="Kalman_AR1", mask_url=None):
    """
    Call the GLM Fit function with apropriate arguments

    Parameters
    ----------
    file, string or list of strings,
          path of the fMRI data file(s)
    designmatrix, string, path of the design matrix .csv file 
    mask_url=None string, path of the mask file
          if None, no mask is applied
    output_glm, string, 
                path of the output glm .npz dump
    outputCon, string,
               path of the output configobj contrast object
    fit= 'Kalman_AR1', string to be chosen among
         "Kalman_AR1", "Ordinary Least Squares", "Kalman"
         that represents both the model and the fit method
                
    Returns
    -------
    glm, a nipy.neurospin.glm.glm instance representing the GLM

    fixme: mask should be optional
    """
    if fit == "Kalman_AR1":
        model = "ar1"
        method = "kalman"
    elif fit == "Ordinary Least Squares":
        method = "ols"
        model = "spherical"
    elif fit == "Kalman":
        method = "kalman"
        model = "spherical"
    else:
        raise ValueError("Unknown fit method: %s" % fit)
    
    import DesignMatrix as dm
    names, X = dm.load_dmtx_from_csv(designMatrix)
      
    Y = load_image(file, mask_url)

    import nipy.neurospin.glm as GLM
    glm = GLM.glm()
    glm.fit(Y.T, X, method=method, model=model)
    glm.save(output_glm)      
    cobj = ConfigObj(outputCon)
    cobj["DesignFilePath"] = designMatrix
    cobj["mask_url"] = mask_url
    cobj.write()   

    return glm
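
A hypothetical invocation of the function defined above (the file names are placeholders, not from the original):

# GLMFit as defined above; paths are illustrative only
fitted_glm = GLMFit("run1_bold.nii", "design_matrix.csv",
                    "glm_dump.npz", "contrast_config.con",
                    fit="Ordinary Least Squares", mask_url="mask.nii")
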
Example #4
def linear_model_fit(data_images, mask_images, design_matrix, vector):
    """
    Helper function for group data analysis using arbitrary design matrix
    """

    # Prepare arrays
    data, vardata, xyz, mask = prepare_arrays(data_images, None, mask_images)

    # Create glm instance
    G = glm(data, design_matrix)

    # Compute requested contrast
    c = G.contrast(vector)

    # Compute z-map image
    zmap = np.zeros(data_images[0].get_shape()).squeeze()
    zmap[list(xyz)] = c.zscore()
    zimg = Image(zmap, data_images[0].get_affine())

    return zimg
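
As an illustration of the "arbitrary design matrix" this helper expects, a two-group comparison can be encoded with one indicator column per group and a [1, -1] contrast (a sketch with made-up group sizes, not taken from the original):

import numpy as np

n_group1, n_group2 = 8, 10
design_matrix = np.zeros((n_group1 + n_group2, 2))   # rows ordered like data_images
design_matrix[:n_group1, 0] = 1                      # group 1 indicator
design_matrix[n_group1:, 1] = 1                      # group 2 indicator
vector = np.array([1, -1])                           # tests group1 - group2
# zimg = linear_model_fit(data_images, mask_images, design_matrix, vector)
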
Example #5
# axis defines the "time direction" 

y = np.random.randn(dimt, dimx*dimy*dimz)
axis = 0

"""
y = np.random.randn(dimx, dimt, dimy, dimz)
axis = 1
"""

X = np.array([np.ones(dimt), range(dimt)])
X = X.transpose() ## the design matrix X must have dimt rows

#mod = glm.glm(y, X, axis=axis) ## default is spherical model using OLS 
mod = glm.glm(y, X, axis=axis, model='ar1')
#mod = glm.glm(y, X, formula='y~x1+(x1|x2)', axis=axis, model='mfx')

##mod.save('toto')
##mod = glm.load('toto')

# Define a t contrast
tcon = mod.contrast([1,0]) 

# Compute the t-stat
t = tcon.stat()
## t = tcon.stat(baseline=1) to test effects > 1 

# Compute the p-value
p = tcon.pvalue()
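
For reference, with the default spherical/OLS model (the commented-out call above), the t statistic returned by tcon.stat() has the familiar closed form t = c'b / sqrt(s2 * c' (X'X)^-1 c). A self-contained NumPy check on simulated data (illustrative only, not nipy code):

import numpy as np

rng = np.random.RandomState(42)
n = 50
Xs = np.vstack([np.ones(n), np.arange(n)]).T      # same two-column design as above
ys = Xs.dot([2.0, 0.1]) + rng.randn(n)            # one simulated time course
b = np.linalg.pinv(Xs).dot(ys)                    # OLS estimates
s2 = ((ys - Xs.dot(b)) ** 2).sum() / (n - 2)      # residual variance
c = np.array([1.0, 0.0])                          # contrast on the constant term, as above
t_by_hand = c.dot(b) / np.sqrt(s2 * c.dot(np.linalg.inv(Xs.T.dot(Xs))).dot(c))
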
Example #6
fmri_data = surrogate_4d_dataset(shape=shape, n_scans=n_scans)[0]

# if you want to save it as an image
data_file = op.join(swd, 'fmri_data.nii')
save(fmri_data, data_file)

########################################
# Perform a GLM analysis
########################################

# GLM fit
Y = fmri_data.get_data()
model = "ar1"
method = "kalman"
glm = GLM.glm()
mp.pcolor(X)
mp.show()
glm.fit(Y.T, X, method=method, model=model)
#explained = np.dot(X,glm.beta.reshape(X.shape[1],-1)).reshape(Y.T.shape).T
#residuals = Y - explained
#residuals_image = Nifti1Image(np.reshape(residuals, shape), affine)

# specify the contrast [1 -1 0 ..]
contrast = np.zeros(X.shape[1])
contrast[0] = 1
contrast[1] = -1
my_contrast = glm.contrast(contrast)

# compute the z-score map associated with this contrast
zvals = my_contrast.zscore()
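
At a single voxel, applying this contrast is just a dot product with the fitted parameters; with illustrative numbers (not from the fit above):

import numpy as np

beta_voxel = np.array([2.0, 1.5, 0.3, 0.0])   # hypothetical estimates for one voxel
contrast = np.array([1, -1, 0, 0])
effect = contrast.dot(beta_voxel)             # 0.5: estimated difference between the
                                              # responses of the first two regressors
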
Example #7
import numpy as np
import pylab as p

from nipy.neurospin import glm

# Load data 
data = np.loadtxt('ad_data2.txt')
age = data[:,0]

# Linear regression
X = np.asarray([age, np.ones(len(age))]).T
Y = data[:,1:] # csf, gm, wm 
mod = glm.glm(Y, X)

# t-test: return z-scores 
# Note that absolute values indicate significance and 
# signs whether effects are positive or negative 
c = mod.contrast([1,0])
z = c.zscore()
pval = c.pvalue() 

# Display the data (crosses) and fitted regression lines for CSF, GM and WM
fit = np.dot(X, mod.beta)
p.plot(age, Y[:,0], 'b+')
p.plot(age, fit[:,0], 'b')
p.plot(age, Y[:,1], 'r+')
p.plot(age, fit[:,1], 'r')
p.plot(age, Y[:,2], 'g+')
p.plot(age, fit[:,2], 'g')
p.show()
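
If, as is common (an assumption about nipy's convention, not stated in the snippet), pvalue() is one-sided and zscore() is its standard-normal quantile transform, the two outputs above are linked by the normal survival function:

from scipy.stats import norm

p_from_z = norm.sf(z)   # should closely match pval above if the convention holds
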
Example #9
## Models
#X = np.asarray([baseline, conditions, saturation]).T 

#X = np.asarray([conditions, saturation, subject_factor]).T
#formula='y~1+x1+x2+(1|x3)+(x1|x3)+(x2|x3)'
#contrasts = ([0,1,0], [0,0,1])

X = np.asarray([conditions, subject_factor]).T 
formula = 'y~x1+(1|x2)'
contrasts = ([1,0], )

# Test: reduce data
y = Y.reshape([Y.shape[0],Y.shape[1],ndata])
y = y[0:3,0:3,:]

# Standard t-stat
print('Starting fitting...')
tic = time()
m = glm(y, X, axis=2, formula=formula, model='mfx')
dt = time()-tic
print('  duration = %d sec' % dt)
m.save('dump')


# Linear contrast
for con in contrasts:
    c = m.contrast(con)
    t = c.stat() 
    display(t, title='Linear contrast')
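
The dump written by m.save('dump') can be read back later; Example #5 shows (commented out) that the module exposes a load function for this. A brief usage sketch, assuming the same module-level loader:

from nipy.neurospin.glm import load

m2 = load('dump')                      # reload the fitted mixed-effects model
t2 = m2.contrast(contrasts[0]).stat()  # recompute the contrast statistic
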
Example #10
    def _run_interface(self, runtime):
        
        session_info = self.inputs.session_info
        
        functional_runs = session_info[0]['scans']
        if isinstance(functional_runs, str):
            functional_runs = [functional_runs]
        nii = nb.load(functional_runs[0])
        data = nii.get_data()

        
        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(nii.shape[:3]) == 1
            
        timeseries = data.copy()[mask,:]
        del data
        
        for functional_run in functional_runs[1:]:
            nii = nb.load(functional_run)
            data = nii.get_data()
            npdata = data.copy()
            del data          
            timeseries = np.concatenate((timeseries,npdata[mask,:]), axis=1)
            del npdata
            
        nscans = timeseries.shape[1]
        
        if 'hpf' in session_info[0]:
            hpf = session_info[0]['hpf']
            drift_model = self.inputs.drift_model
        else:
            hpf = 0
            drift_model = "Blank"
        
        reg_names = []
        for reg in session_info[0]['regress']:
            reg_names.append(reg['name'])
        
        reg_vals = np.zeros((nscans, len(reg_names)))
        for i in range(len(reg_names)):
            reg_vals[:, i] = np.array(session_info[0]['regress'][i]['val']).ravel()
        
        
        frametimes= np.linspace(0, (nscans-1)*self.inputs.TR, nscans)
        
        conditions = []
        onsets = []
        duration = []
        
        for cond in session_info[0]['cond']:
            onsets += cond['onset']
            conditions += [cond['name']] * len(cond['onset'])
            if len(cond['duration']) == 1:
                duration += cond['duration'] * len(cond['onset'])
            else:
                duration += cond['duration']
        
        paradigm =  dm.BlockParadigm(con_id=conditions, onset=onsets, duration=duration)
        design_matrix, self._reg_names = dm.dmtx_light(frametimes, paradigm, drift_model=drift_model, hfcut=hpf,
               hrf_model=self.inputs.hrf_model, 
               add_regs=reg_vals,
               add_reg_names=reg_names
               )
        if self.inputs.normalize_design_matrix:
            for i in range(len(self._reg_names)-1):
                design_matrix[:,i] = (design_matrix[:,i]-design_matrix[:,i].mean())/design_matrix[:,i].std()
                
        if self.inputs.plot_design_matrix:
            pylab.pcolor(design_matrix)
            pylab.savefig("design_matrix.pdf")
            pylab.close()
            pylab.clf()
        
        glm = GLM.glm()
        glm.fit(timeseries.T, design_matrix, method=self.inputs.method, model=self.inputs.model)
        
        
        self._beta_file = os.path.abspath("beta.nii")
        beta = np.zeros(mask.shape + (glm.beta.shape[0],))
        beta[mask,:] = glm.beta.T
        nb.save(nb.Nifti1Image(beta, nii.get_affine()), self._beta_file)
        
        self._s2_file = os.path.abspath("s2.nii")
        s2 = np.zeros(mask.shape)
        s2[mask] = glm.s2
        nb.save(nb.Nifti1Image(s2, nii.get_affine()), self._s2_file)
        
        if self.inputs.save_residuals:
            explained = np.dot(design_matrix,glm.beta)
            residuals = np.zeros(mask.shape + (nscans,))
            residuals[mask,:] = timeseries - explained.T
            self._residuals_file = os.path.abspath("residuals.nii")
            nb.save(nb.Nifti1Image(residuals, nii.get_affine()), self._residuals_file)
        
        self._nvbeta = glm.nvbeta
        self._dof = glm.dof
        self._constants = glm._constants
        self._axis = glm._axis
        if self.inputs.model == "ar1":
            self._a_file = os.path.abspath("a.nii")
            a = np.zeros(mask.shape)
            a[mask] = glm.a.squeeze()
            nb.save(nb.Nifti1Image(a, nii.get_affine()), self._a_file)
        self._model = glm.model
        self._method = glm.method
        
        return runtime
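
For orientation, the quantities persisted here (beta, s2, dof and nvbeta) correspond, in the ordinary least squares / "spherical" case, to the usual GLM estimates; the exact meaning of nvbeta in nipy is assumed here, not taken from the snippet. A minimal NumPy sketch of those formulas (an illustration of the concepts, not nipy's implementation):

import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(100, 4)                        # design matrix: n_scans x n_regressors
Y = rng.randn(100, 50)                       # data: n_scans x n_voxels

beta = np.linalg.pinv(X).dot(Y)              # parameter estimates, one column per voxel
resid = Y - X.dot(beta)                      # residual time series
dof = X.shape[0] - np.linalg.matrix_rank(X)  # residual degrees of freedom
s2 = (resid ** 2).sum(axis=0) / dof          # residual variance per voxel
nvbeta = np.linalg.inv(X.T.dot(X))           # normalized covariance of the estimates
# variance of a contrast c'beta at each voxel: s2 * c.dot(nvbeta).dot(c)
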