def correlation_map(data, cond_filename):
    """Correlate each voxel's BOLD time course with the convolved regressor.

    Parameters
    ----------
    data : ndarray
        Volumetric BOLD data whose last axis is time (presumably
        (x, y, z, t) — confirm with caller).
    cond_filename : str
        Path to the condition (onsets) file.

    Returns
    -------
    ndarray
        Array of Pearson r values, one per voxel (shape ``data.shape[:-1]``).
    """
    # Fix: dropped the unused `tr_times = np.arange(0, 30, TR)` local —
    # it was computed but never referenced.
    convolved = conv_main(data.shape[-1], cond_filename, TR)
    corrs = np.zeros(data.shape[:-1])
    for i in general_utils.vol_index_iter(data.shape[:-1]):
        # np.corrcoef returns a 2x2 matrix; [1, 0] is the cross term.
        corrs[i] = np.corrcoef(data[i], convolved)[1, 0]
    return corrs
def correlation_map(data, cond_filename):
    """Build a per-voxel correlation map against the convolved baseline.

    Parameters
    ----------
    data : ndarray
        BOLD data with time on the last axis.
    cond_filename : str
        Condition (onsets) file path.

    Returns
    -------
    ndarray
        Pearson correlation per voxel, shape ``data.shape[:-1]``.
    """
    # Fix: removed dead local `tr_times` (assigned, never used).
    convolved = conv_main(data.shape[-1], cond_filename, TR)
    corrs = np.zeros(data.shape[:-1])
    for i in general_utils.vol_index_iter(data.shape[:-1]):
        # Off-diagonal entry of the 2x2 correlation matrix = r(voxel, regressor).
        corrs[i] = np.corrcoef(data[i], convolved)[1, 0]
    return corrs
def correlation_map(data, cond_filename):
    """Cross-correlate every voxel's BOLD signal with the convolved gamma
    baseline.  The caller is assumed to have already dropped the first 5
    images, so the regressor is convolved over the full run and its first
    5 samples are discarded to stay aligned.
    """
    full_length = data.shape[-1] + 5
    predicted = conv_main(full_length, cond_filename, TR)[5:]
    result = np.zeros(data.shape[:-1])
    for idx in gu.vol_index_iter(data.shape[:-1]):
        coeff = np.corrcoef(data[idx], predicted)[1, 0]
        # A zero-variance voxel makes the correlation undefined (nan);
        # record it as uncorrelated instead.
        result[idx] = 0 if np.isnan(coeff) else coeff
    return result
def correlation_map(data, cond_filename):
    """Per-voxel correlation of BOLD data against the convolved gamma
    baseline.  Assumes the first 5 volumes were dropped upstream: the
    regressor is built for the full length and its leading 5 samples
    are trimmed to match.
    """
    baseline = conv_main(data.shape[-1] + 5, cond_filename, TR)[5:]
    out = np.zeros(data.shape[:-1])
    for voxel in gu.vol_index_iter(data.shape[:-1]):
        r = np.corrcoef(data[voxel], baseline)[1, 0]
        if np.isnan(r):
            # nan arises for constant time courses; report zero correlation.
            r = 0
        out[voxel] = r
    return out
def correlation_map_linear(data, cond_filename):
    """2-D counterpart of correlation_map: takes data of shape
    (n_samples, n_time_slices), e.g. voxels already flattened through a
    brain mask, and returns one correlation per sample.
    """
    regressor = conv_main(data.shape[-1] + 5, cond_filename, TR)[5:]
    scores = np.zeros(data.shape[:-1])
    for row in range(data.shape[0]):
        value = np.corrcoef(data[row], regressor)[1, 0]
        # Constant rows give an undefined (nan) correlation; use 0.
        scores[row] = 0 if np.isnan(value) else value
    return scores
def correlation_map_linear(data, cond_filename):
    """Like correlation_map, but for 2-D input of shape
    (n_samples, n_time_slices) — suitable for data extracted with a
    brain mask.
    """
    expected = conv_main(data.shape[-1] + 5, cond_filename, TR)[5:]
    rs = np.zeros(data.shape[:-1])
    n_samples = data.shape[0]
    for k in range(n_samples):
        r = np.corrcoef(data[k], expected)[1, 0]
        if np.isnan(r):
            # Undefined correlation (flat signal) -> treat as 0.
            r = 0
        rs[k] = r
    return rs
def correlation_map_linear(data, cond_filename):
    """Compute baseline-method correlations for 2-D brain data.

    Input:
        data: brain image data, one row per sample with time on axis 1
        cond_filename: condition file describing the event time course
    Output:
        array of per-sample correlation values
    """
    hrf_regressor = conv_main(data.shape[-1] + 5, cond_filename, TR)[5:]
    correlations = np.zeros(data.shape[:-1])
    for sample in range(data.shape[0]):
        rho = np.corrcoef(data[sample], hrf_regressor)[1, 0]
        # nan (zero-variance sample) is mapped to zero.
        correlations[sample] = 0 if np.isnan(rho) else rho
    return correlations
def correlation_map_linear(data, cond_filename):
    """Baseline-method correlation matrix for flattened brain data.

    Input:
        data: brain image data (rows = samples, columns = time slices)
        cond_filename: condition file with event timing information
    Output:
        per-row correlation values
    """
    template = conv_main(data.shape[-1] + 5, cond_filename, TR)[5:]
    answer = np.zeros(data.shape[:-1])
    for j in range(data.shape[0]):
        c = np.corrcoef(data[j], template)[1, 0]
        if np.isnan(c):
            # A sample with no variance has no defined correlation; use 0.
            c = 0
        answer[j] = c
    return answer
# Script: convolve a condition (onsets) file with the HRF and save the
# resulting regressor to disk.  Runs its side effects at import time.
import project_config
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats
from scipy.stats import gamma
from stimuli_revised import events2neural
from conv import conv_main

""" Replace these variables before running the script """
# Hard-coded path to one subject/run's condition file — edit before running.
cond_filename = "../../../ds115_sub010-014/sub013/model/model001/onsets/task001_run001/cond002.txt"
# Number of TRs (volumes) in the run — edit before running.
n_trs = 132
# Repetition time taken from the shared project configuration.
TR = project_config.TR

# Build the expected BOLD response from the condition onsets.
convolved = conv_main(n_trs, cond_filename, TR)

#save convolved data in txt file:
# NOTE(review): assumes the 'results/' directory already exists — confirm.
np.savetxt('results/conv_data.txt', convolved)

# Commented-out exploratory/plotting code kept from earlier iterations:
# tr_times = np.arange(0, time_length, time_unit)
# hrf_at_trs = hrf(tr_times)
# len(hrf_at_trs)
# plt.plot(tr_times, hrf_at_trs)
# plt.xlabel('time')
# plt.ylabel('HRF sampled every 2.5 seconds')
# n_vols = 132
# neural_prediction = events2neural('cond_nb_tar.txt',
# 0.1, n_vols)
# all_tr_times = np.arange(n_vols*TR/time_unit)*.1
# plt.plot(all_tr_times, neural_prediction)
# Script: convolve a condition (onsets) file with the HRF and save the
# resulting regressor to disk.  Runs its side effects at import time.
# Fix: `project_config` was referenced below (project_config.TR) but never
# imported, which raised NameError at runtime — import restored.
import project_config
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats
from scipy.stats import gamma
from stimuli_revised import events2neural
from conv import conv_main

""" Replace these variables before running the script """
# Hard-coded path to one subject/run's condition file — edit before running.
cond_filename = "../../../ds115_sub010-014/sub013/model/model001/onsets/task001_run001/cond002.txt"
# Number of TRs (volumes) in the run — edit before running.
n_trs = 132
# Repetition time taken from the shared project configuration.
TR = project_config.TR

# Build the expected BOLD response from the condition onsets.
convolved = conv_main(n_trs, cond_filename, TR)

#save convolved data in txt file:
# NOTE(review): assumes the 'results/' directory already exists — confirm.
np.savetxt('results/conv_data.txt', convolved)

# Commented-out exploratory/plotting code kept from earlier iterations:
# tr_times = np.arange(0, time_length, time_unit)
# hrf_at_trs = hrf(tr_times)
# len(hrf_at_trs)
# plt.plot(tr_times, hrf_at_trs)
# plt.xlabel('time')
# plt.ylabel('HRF sampled every 2.5 seconds')
# n_vols = 132
# neural_prediction = events2neural('cond_nb_tar.txt',