Example #1
import numpy as np
from numpy.testing import assert_array_equal

from t_stat import t_stat  # project helper under test


def test_t_stat():
    psychopathy = [11.416, 4.514, 12.204, 14.835,
                   8.416, 6.563, 17.343, 13.02,
                   15.19, 11.902, 22.721, 22.324]
    clammy = [0.389, 0.2, 0.241, 0.463,
              4.585, 1.097, 1.642, 4.972,
              7.957, 5.585, 5.527, 6.964]
    age = [22.5, 25.3, 24.6, 21.4,
           20.7, 23.3, 23.8, 21.7,
           21.3, 25.2, 24.6, 21.8]
    X = np.column_stack((np.ones(12), clammy))
    Y = np.asarray(psychopathy)
    B, t, df, p = t_stat(Y, X)
    assert_array_equal((np.around(t[1][:6], 6), np.around(p[1][:6], 6)),
                       ([1.914389], [0.042295]))
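The test above pins down the interface of t_stat: it takes a data vector (or array) Y and a design matrix X, and returns the OLS betas, per-regressor t statistics, the residual degrees of freedom, and p values. Below is a minimal sketch of an implementation consistent with the expected numbers in the test (p = 0.042295 for t ≈ 1.914 with 10 degrees of freedom corresponds to a one-tailed test); it only covers the 1-D/2-D case shown here, not the 4-D image handling used in the later examples, and it is an assumption rather than the project's actual t_stat.

# Minimal sketch of t_stat (an assumption, not the project's exact code):
# ordinary least squares fit of Y on X, per-regressor t statistics, and
# one-tailed p values, matching the expected values in the test above.
import numpy as np
import numpy.linalg as npl
from scipy import stats


def t_stat(Y, X):
    Y = np.atleast_2d(Y)
    if Y.shape[0] != X.shape[0]:
        Y = Y.T                                     # accept 1-D input or (n_series, n_trs)
    beta = npl.pinv(X).dot(Y)                       # (n_regressors, n_series)
    residuals = Y - X.dot(beta)
    df = X.shape[0] - npl.matrix_rank(X)            # residual degrees of freedom
    mrss = np.sum(residuals ** 2, axis=0) / df      # mean residual sum of squares
    se = np.sqrt(mrss * np.diag(npl.pinv(X.T.dot(X)))[:, None])
    t = beta / se
    p = stats.t.sf(t, df)                           # one-tailed p values
    return beta, t, df, p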
Example #2
def test_t_stat():
    psychopathy = [
        11.416, 4.514, 12.204, 14.835, 8.416, 6.563, 17.343, 13.02, 15.19,
        11.902, 22.721, 22.324
    ]
    clammy = [
        0.389, 0.2, 0.241, 0.463, 4.585, 1.097, 1.642, 4.972, 7.957, 5.585,
        5.527, 6.964
    ]
    age = [
        22.5, 25.3, 24.6, 21.4, 20.7, 23.3, 23.8, 21.7, 21.3, 25.2, 24.6, 21.8
    ]
    X = np.column_stack((np.ones(12), clammy))
    Y = np.asarray(psychopathy)
    B, t, df, p = t_stat(Y, X)
    assert_array_equal((np.around(t[1][:6], 6), np.around(p[1][:6], 6)),
                       ([1.914389], [0.042295]))

    # (fragment) X_matrix1, X_matrix2, and the number of design columns p are
    # set earlier in the original script; columns 1-4 hold the convolved
    # condition regressors
    X_matrix3 = np.loadtxt(txt_path[3])
    X_matrix4 = np.loadtxt(txt_path[4])
    X_matrix = np.ones((len(X_matrix1), p))
    X_matrix[..., 1] = X_matrix1
    X_matrix[..., 2] = X_matrix2
    X_matrix[..., 3] = X_matrix3
    X_matrix[..., 4] = X_matrix4
    # linear and demeaned quadratic drift regressors
    linear_drift = np.linspace(-1, 1, 240)
    quadratic_drift = linear_drift ** 2
    quadratic_drift -= np.mean(quadratic_drift)
    X_matrix[..., 5] = linear_drift
    X_matrix[..., 6] = quadratic_drift

    # smooth the data, then re-run the regression with the high resolution design matrix
    data_smooth = smoothing(data, 1, range(data.shape[-1]))
    beta_3d_smooth, t, df, p = t_stat(data_smooth, X_matrix)
    beta_3d_smooth_task = beta_3d_smooth[..., 1]
    beta_3d_smooth_gain = beta_3d_smooth[..., 2]
    beta_3d_smooth_loss = beta_3d_smooth[..., 3]
    beta_3d_smooth_dist = beta_3d_smooth[..., 4]

    location_of_txt = dirs[0]
    np.savetxt(location_of_txt + '/' + name[0:17] + "_beta_task.txt",
               beta_3d_smooth_task.ravel())
    np.savetxt(location_of_txt + '/' + name[0:17] + "_beta_gain.txt",
               beta_3d_smooth_gain.ravel())
    np.savetxt(location_of_txt + '/' + name[0:17] + "_beta_loss.txt",
               beta_3d_smooth_loss.ravel())
    np.savetxt(location_of_txt + '/' + name[0:17] + "_beta_dist.txt",
               beta_3d_smooth_dist.ravel())
print("\nAll betas generated from the multi glm analysis")
print("See project-epsilon/" + location_of_txt + " for the txt files containing the betas")

from __future__ import division, print_function, absolute_import
import numpy as np
import sys
sys.path.append(".././utils")
import nibabel as nib
import matplotlib.pyplot as plt
from load_BOLD import *
from t_stat import *
from find_activated_voxel_functions import *
from convolution_normal_script import X_matrix
from convolution_high_res_script import X_matrix_high_res

location_of_data = "../../data/ds005/sub001/BOLD/task001_run001/"
data = load_img(1, 1)
beta, t, df, p = t_stat(data, X_matrix_high_res)
shape = data.shape[:3]

lst = find_activated_voxel(shape, p)
location_of_txt = "../txt_files/"

for i in range(1, len(lst) + 1):
    np.savetxt(location_of_txt + 'ds005_sub001_t1r1_position%s.txt' % (str(i)),
               lst[i - 1].ravel())
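The script above also depends on find_activated_voxel from the project's utilities, which is not defined on this page. Since its result is a list whose elements are written out as position1, position2, ... files, one plausible reading is that it thresholds the p values for a regressor of interest and returns one coordinate array per axis. A hedged sketch under that assumption follows; the threshold, the regressor index, and the exact return format are guesses.

# Hypothetical sketch of find_activated_voxel: reshape the p values for one
# regressor back to the volume shape, threshold them, and return one array of
# voxel coordinates per axis. Threshold and regressor index are assumptions.
import numpy as np


def find_activated_voxel(shape, p, regressor=1, threshold=0.05):
    p_vol = np.reshape(np.asarray(p)[regressor], shape)
    return np.where(p_vol < threshold)              # tuple: (i coords, j coords, k coords)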
Example #6
    # smooth the data set
    smooth_data = smoothing(data, 1, range(n_trs))
    # initialize the design matrix for the t test
    p = 7
    X_matrix = np.ones((data.shape[-1], p))
    # build the design matrix: columns 1-4 are the convolved condition regressors
    for cond in range(1, 5):
        convolved = np.loadtxt(txt_path + name + '_conv_' + str(cond).zfill(3) + '_high_res.txt')
        # convolved = np.loadtxt(txt_path + name + '_conv_' + str(cond).zfill(3) + '_canonical.txt')
        X_matrix[:, cond] = convolved
    linear_drift = np.linspace(-1, 1, n_trs)
    X_matrix[:, 5] = linear_drift
    quadratic_drift = linear_drift ** 2
    quadratic_drift -= np.mean(quadratic_drift)
    X_matrix[:, 6] = quadratic_drift
    beta, t, df, p = t_stat(smooth_data, X_matrix)
    # column 0 of the design is the intercept, so the condition t maps are rows 1-4
    for cond in range(1, 5):
        print("Starting test for condition " + str(cond))
        t_newshape = np.reshape(t[cond, :], vol_shape)
        t_newshape[~in_brain_mask] = np.nan
        t_T = np.zeros(vol_shape)
        for z in range(vol_shape[2]):
            t_T[:, :, z] = t_newshape[:, :, z].T
        t_plot = plot_mosaic(t_T)
        plt.imshow(t_plot, interpolation='nearest', cmap='seismic')
        zero_out = max(abs(np.nanmin(t_T)), np.nanmax(t_T))
        plt.title(name + '_t_statistics_cond_%s' % cond)
        plt.clim(-zero_out, zero_out)
        plt.colorbar()
        plt.savefig(dirs[1] + '/' + name + '_t-test_cond' + str(cond) + '.png')
        plt.close()
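Example #6 also relies on plot_mosaic, which is likewise not defined on this page. A common way to build such a helper is to tile the axial slices of a 3-D volume into a single 2-D grid so the whole brain fits in one imshow call; here is a sketch under that assumption, not the project's actual utility. The zero_out / plt.clim(-zero_out, zero_out) pair in the loop then centers the seismic colormap at zero so positive and negative t values are visually comparable.

# Hypothetical sketch of plot_mosaic: tile the axial (z) slices of a 3-D
# volume into one 2-D array, padding unused cells with NaN. An assumption
# about the plotting utility used above.
import numpy as np


def plot_mosaic(vol):
    nx, ny, nz = vol.shape
    n_cols = int(np.ceil(np.sqrt(nz)))
    n_rows = int(np.ceil(nz / float(n_cols)))
    mosaic = np.full((n_rows * nx, n_cols * ny), np.nan)
    for z in range(nz):
        r, c = divmod(z, n_cols)
        mosaic[r * nx:(r + 1) * nx, c * ny:(c + 1) * ny] = vol[:, :, z]
    return mosaic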