plt.close()

# Render the DCT design matrix as an image and save it to disk.
plt.imshow(dct_design_mat, aspect=0.1, cmap="gray", interpolation="nearest")
plt.savefig("../../../data/design_matrix/dct_design_mat.png")
plt.close()

# --- Brain masking -------------------------------------------------------
# Mean volume over the time axis; voxels whose temporal mean exceeds 8000
# are treated as in-brain (the threshold is hard-coded here — presumably
# chosen from the histogram elsewhere in this pipeline; TODO confirm).
mean_vol = np.mean(data, axis=-1)
in_brain_mask = mean_vol > 8000

# Spatially smooth the 4D data, restricted to the in-brain voxels.
# The 3D mask selects all voxel time-courses inside the brain.
y = linear_modeling.smoothing(data, in_brain_mask)

# --- GLM fit and t-test inputs ------------------------------------------
X = dct_design_mat
np.savetxt("../../../data/design_matrix/full_dct_design_mat.txt", X)

# NOTE(review): here beta_est is unpacked into four values, while the
# block-design section unpacks it into three — verify the signature of
# linear_modeling.beta_est used by each call site.
beta, errors, MRSS, df = linear_modeling.beta_est(y, X)
print("The mean MRSS across all voxels in mixed design is " + str(np.mean(MRSS)))
np.savetxt("../../../data/beta/" + f1 + "_betas_hat_full_dct.txt", beta, newline="\r\n")

# Scatter the fitted betas back into brain-shaped volumes (one volume per
# regressor) so the middle slice can be visualized on a uniform scale.
b_vols = np.zeros(vol_shape + (beta.shape[0],))
b_vols[in_brain_mask, :] = beta.T
# --- Block-design matrix -------------------------------------------------
# Column 0: intercept (ones); columns 1-3: start, end, and the convolved
# regressor, one row per time point of `convo`.
design = np.ones((len(convo), 4))
design[:, 1] = start
design[:, 2] = end
design[:, 3] = convo

# Split the 4D shape into spatial volume shape and number of time points.
vol_shape, n_time = data.shape[:-1], data.shape[-1]
# shape_2d = (n_time, np.product(vol_shape)) # (133, 902629)

# --- Masking and smoothing ----------------------------------------------
# Histogram of the temporal mean values, with the 8000 cutoff marked;
# voxels above the cutoff are kept as in-brain.
mean_data = np.mean(data, -1)
plt.figure(2)
plt.hist(np.ravel(mean_data), bins=100)
line = plt.axvline(8000, ls='--', color='red')
mask = mean_data > 8000
smooth_data = linear_modeling.smoothing(data, mask)

# --- Regression ----------------------------------------------------------
betas_hat, s2, df = linear_modeling.beta_est(smooth_data, design)  # (4, 194287)
np.savetxt('../../../data/beta/' + f2 + '_betas_hat_block.txt', betas_hat.T, newline='\r\n')

# Fill the fitted betas back into volumes shaped like the raw data,
# marking everything outside the mask as missing (NaN).
beta_vols = np.zeros(vol_shape + (betas_hat.shape[0],))  # (91, 109, 91, 4)
beta_vols[mask] = betas_hat.T
mean_data[~mask] = np.nan
beta_vols[~mask] = np.nan

# --- T-test --------------------------------------------------------------
# NOTE(review): the original comment claims the contrast is [0,0,0,1]
# (beta3 only) but the code passes [0,1,1,1] — confirm which is intended
# before interpreting these t/p maps.
t_value, p_value = linear_modeling.t_stat(design, [0,1,1,1], betas_hat, s2, df)  # (1, 194287) (1, 194287)
# --- Drift regressors ----------------------------------------------------
# Column 6: linear drift over the scan; column 7: demeaned quadratic drift
# (demeaning keeps it roughly orthogonal to the intercept column).
linear_drift = np.linspace(-1, 1, n_trs)
design_mat[:, 6] = linear_drift
quadratic_drift = linear_drift ** 2
quadratic_drift -= np.mean(quadratic_drift)
design_mat[:, 7] = quadratic_drift

# --- Brain masking and smoothing ----------------------------------------
# Voxels whose mean-over-time exceeds 8000 are treated as in-brain; the 3D
# mask selects those voxel time-courses from the 4D dataset.
mean_vol = np.mean(data, axis=-1)
in_brain_mask = mean_vol > 8000
y = linear_modeling.smoothing(data, in_brain_mask)

# --- Residual sums of squares for F-tests -------------------------------
X = design_mat

# Full model: recover total RSS from the mean RSS and degrees of freedom.
_, _, MRSS, df = linear_modeling.beta_est(y, X)
RSS = MRSS * df

# Reduced model for H0: beta1 = beta4 = beta5 = 0 (block design) — drop
# those columns (indices 0, 3, 4) and refit.
index1 = np.array([0, 3, 4])
X_1 = np.delete(X, index1, axis=1)
_, _, MRSS1, df1 = linear_modeling.beta_est(y, X_1)
RSS1 = MRSS1 * df1

# (A matching reduced model for beta2 = beta3 = 0 — the event-related
# design — follows beyond this chunk.)