def get_results(randomise_outputs):
    """Load randomise image outputs and convert them to p-value arrays.

    Takes a list of randomise image outputs, loads each into a numpy
    array, converts the stored 1-p values to p values, saves each p-value
    matrix alongside the input as a tab-delimited text file, and collects
    the arrays for further processing.

    Parameters
    ----------
    randomise_outputs : list
        List of files output by randomise (ex. *vox_p_tstat1.nii.gz).

    Returns
    -------
    results : dict
        key = contrast name (ex. 'contrast1');
        value = numpy array containing p values of group comparison
        output by randomise.
    """
    results = {}
    # enumerate from 1 so contrast names match randomise's 1-based numbering
    for num, rand_output in enumerate(randomise_outputs, start=1):
        pth, fname, ext = gift_utils.split_filename(rand_output)
        # randomise writes 1 - p images; recover the actual p values
        data_1minuspval = load_rand_img(rand_output)
        data_pval = 1 - data_1minuspval
        outfile = os.path.join(pth, fname + '.txt')
        np.savetxt(outfile, data_pval, fmt='%1.5f', delimiter='\t')
        print('p value matrix saved to %s' % (outfile))
        results['contrast' + str(num)] = data_pval
    return results
# Save out text files of correlation and lags
fnc_corr_outfile = os.path.join(datadir, fnc_corr_out)
np.savetxt(fnc_corr_outfile, fnc_corr, fmt='%1.5f', delimiter='\t')
fnc_corr_z_outfile = os.path.join(datadir, fnc_corr_z_out)
np.savetxt(fnc_corr_z_outfile, fnc_corr_z, fmt='%1.5f', delimiter='\t')
fnc_lag_outfile = os.path.join(datadir, fnc_lag_out)
np.savetxt(fnc_lag_outfile, fnc_lag, fmt='%1.2f', delimiter='\t')

## Run group analysis #######################
exists, resultsdir = gu.make_dir(datadir, 'randomise')
resultsglob = os.path.join(datadir, 'FNCtb_*.csv')
result_files = glob(resultsglob)
for fnc_data_file in result_files:
    # Load per-subject FNC data and wrap it in a NIfTI image so
    # randomise can consume it.
    fnc_data = np.genfromtxt(fnc_data_file, names=None, dtype=float,
                             delimiter=None)
    pth, fname, ext = gu.split_filename(fnc_data_file)
    fnc_img_fname = os.path.join(resultsdir, fname + '.nii.gz')
    fnc_saveimg = gu.save_img(fnc_data, fnc_img_fname)
    rand_basename = os.path.join(resultsdir, fname)
    # Run randomise, then load its uncorrected and corrected p maps.
    p_uncorr_list, p_corr_list = ga.randomise(fnc_saveimg, rand_basename,
                                              des_file, con_file)
    uncorr_results = ga.get_results(p_uncorr_list)
    corr_results = ga.get_results(p_corr_list)
    # Apply FDR correction per contrast; iterate the sorted keys once
    # instead of re-sorting inside an index loop (was O(n^2 log n)).
    fdr_results = {}
    for conname in sorted(uncorr_results):
        fdr_corr_arr = ga.multi_correct(uncorr_results[conname])
        fdr_results[conname] = gu.square_from_combos(fdr_corr_arr, nnodes)