def test_load_large_nii():
    """Test nii-loading function for large nii files."""
    # Path of the example functional data (3 volumes):
    strPthFunc = (strDir + '/exmpl_data_func_3vols.nii.gz')

    # Load the example functional data in normal mode:
    aryNrml, _, _ = util.load_nii(strPthFunc)

    # Load the same file in large-file mode (a size threshold of zero forces
    # the large-file code path):
    aryLrge, _, _ = util.load_nii(strPthFunc, varSzeThr=0.0)

    # Both code paths should yield identical arrays:
    assert np.array_equal(aryNrml, aryLrge)
def test_main():
    """Run main pyprf function and compare results with template."""
    # -------------------------------------------------------------------------
    # *** Preparations

    # Decimal places to round before comparing template and test results:
    varRnd = 3

    # Load template result:
    aryTmplR2, _, _ = util.load_nii((strDir + '/exmpl_data_results_R2.nii.gz'))

    # Round template results:
    aryTmplR2 = np.around(aryTmplR2.astype(np.float32), decimals=varRnd)

    # -------------------------------------------------------------------------
    # *** Test numpy, cython, and tensorflow versions

    # The three versions share identical test logic, so loop over them
    # instead of duplicating the code. Config files are named after the full
    # version name, result files after the abbreviation:
    lstVrsn = [('numpy', 'np'),
               ('cython', 'cy'),
               ('tensorflow', 'tf')]

    # Dictionary for test outcome (one logical value per version):
    dicLgc = {}

    for strVrsnLng, strVrsn in lstVrsn:

        # Path of config file for tests:
        strCsvCnfg = (strDir + '/config_testing_' + strVrsnLng + '.csv')

        # Call main pyprf function:
        pyprf_main.pyprf(strCsvCnfg, lgcTest=True)

        # Load result:
        aryTestR2, _, _ = util.load_nii(
            (strDir
             + '/result/'
             + 'pRF_test_results_'
             + strVrsn
             + '_R2.nii.gz'))

        # Round test results:
        aryTestR2 = np.around(aryTestR2.astype(np.float32), decimals=varRnd)

        # Do the template and test results correspond?
        dicLgc[strVrsn] = np.all(np.equal(aryTmplR2, aryTestR2))

    # -------------------------------------------------------------------------
    # *** Clean up

    # Clean up before asserting, so that leftover result files do not
    # interfere with subsequent test runs.

    # Path of directory with results:
    strDirRes = strDir + '/result/'

    # Get list of files in results directory:
    lstFls = [f for f in os.listdir(strDirRes) if isfile(join(strDirRes, f))]

    # Delete results of test (nii and npy files only; other files in the
    # directory are left untouched):
    for strTmp in lstFls:
        if ('.nii' in strTmp) or ('.npy' in strTmp):
            os.remove((strDirRes + '/' + strTmp))

    # -------------------------------------------------------------------------
    # Assert each version separately, so that a failure message identifies
    # the version that did not reproduce the template result:
    for _, strVrsn in lstVrsn:
        assert dicLgc[strVrsn], ('Version failed: ' + strVrsn)
def test_main():
    """Run main pyprf function and compare results with template."""
    # -------------------------------------------------------------------------
    # *** Preparations

    # Decimal places to round before comparing template and test results:
    varRnd = 3

    def _load_rnd(strPth):
        """Load nii file at `strPth`; round to `varRnd` decimals and cast to
        float32, so that template and test results are compared at the same
        precision."""
        aryNii, _, _ = util.load_nii(strPth)
        return np.around(aryNii, decimals=varRnd).astype(np.float32)

    # Load template results (R2, eccentricity, polar angle, SD, and the
    # parameter estimates of the two conditions), rounded:
    aryTmplR2 = _load_rnd(strDir + '/exmpl_data_results_R2.nii.gz')
    aryTmplEcc = _load_rnd(strDir + '/exmpl_data_results_eccentricity.nii.gz')
    aryTmplPol = _load_rnd(strDir + '/exmpl_data_results_polar_angle.nii.gz')
    aryTmplSd = _load_rnd(strDir + '/exmpl_data_results_SD.nii.gz')
    aryTmplPe01 = _load_rnd(strDir + '/exmpl_data_results_PE_01.nii.gz')
    aryTmplPe02 = _load_rnd(strDir + '/exmpl_data_results_PE_02.nii.gz')

    # -------------------------------------------------------------------------
    # *** Test pyprf main pipeline

    # Test numpy, cython, and cython-hdf5 versions. List with version
    # abbreviations:
    lstVrsn = ['np', 'cy', 'cy_hdf5']  # 'np_hdf5'

    # Path of config file for tests (version abbreviation left open):
    strCsvCnfg = (strDir + '/config_testing_{}.csv')

    for strVrsn in lstVrsn:

        # Call main pyprf function:
        pyprf_main.pyprf(strCsvCnfg.format(strVrsn), lgcTest=True)

        # Path of result files of current version (result name left open):
        strPthRes = (strDir + '/result/'
                     + 'pRF_test_results_' + strVrsn + '_{}.nii.gz')

        # Load & round test results:
        aryTestR2 = _load_rnd(strPthRes.format('R2'))
        aryTestEcc = _load_rnd(strPthRes.format('eccentricity'))
        aryTestPol = _load_rnd(strPthRes.format('polar_angle'))
        aryTestSd = _load_rnd(strPthRes.format('SD'))
        aryTestlPe01 = _load_rnd(strPthRes.format('PE_01'))
        aryTestlPe02 = _load_rnd(strPthRes.format('PE_02'))

        # Did the current version pass the test? Template and test results
        # need to be identical after rounding.
        assert np.all(np.equal(aryTmplR2, aryTestR2))
        assert np.all(np.equal(aryTmplEcc, aryTestEcc))
        assert np.all(np.equal(aryTmplPol, aryTestPol))
        assert np.all(np.equal(aryTmplSd, aryTestSd))
        assert np.all(np.equal(aryTmplPe01, aryTestlPe01))
        assert np.all(np.equal(aryTmplPe02, aryTestlPe02))

    # -------------------------------------------------------------------------
    # *** Clean up testing results

    # Path of directory with results:
    strDirRes = strDir + '/result/'

    # Get list of files in results directory:
    lstFls = [f for f in os.listdir(strDirRes) if isfile(join(strDirRes, f))]

    # Delete results of test (nii, npy, and hdf5 files):
    for strTmp in lstFls:
        if any((strExt in strTmp) for strExt in ('.nii', '.npy', '.hdf5')):
            os.remove((strDirRes + '/' + strTmp))

    # -------------------------------------------------------------------------
    # *** Clean up intermediate results (hdf5 files)

    # Path of directory with time courses converted to hdf5:
    strDirTmp = strDir + '/'

    # Get list of files in that directory:
    lstFls = [f for f in os.listdir(strDirTmp) if isfile(join(strDirTmp, f))]

    # Delete intermediate hdf5 files:
    for strTmp in lstFls:
        if '.hdf5' in strTmp:
            os.remove((strDirTmp + '/' + strTmp))
def pre_pro_func(strPathNiiMask, lstPathNiiFunc, lgcLinTrnd=True,
                 varSdSmthTmp=2.0, varSdSmthSpt=0.0, varPar=10):
    """
    Load & preprocess functional data.

    Parameters
    ----------
    strPathNiiMask: str
        Path of mask used to restrict pRF model finding. Only voxels with
        a value greater than zero in the mask are considered.
    lstPathNiiFunc : list
        List of paths of functional data (nii files).
    lgcLinTrnd : bool
        Whether to perform linear trend removal on functional data.
    varSdSmthTmp : float
        Extent of temporal smoothing that is applied to functional data and
        pRF time course models, [SD of Gaussian kernel, in seconds]. If
        `zero`, no temporal smoothing is applied.
    varSdSmthSpt : float
        Extent of spatial smoothing [SD of Gaussian kernel, in mm]. If
        `zero`, no spatial smoothing is applied.
    varPar : int
        Number of processes to run in parallel (multiprocessing).

    Returns
    -------
    vecLgcMsk : np.array
        1D numpy array with logical values. Externally supplied mask (e.g.
        grey matter mask). Voxels that are `False` in the mask are excluded.
    hdrMsk : nibabel-header-object
        Nii header of mask.
    aryAff : np.array
        Array containing 'affine', i.e. information about spatial positioning
        of mask nii data.
    vecLgcVar : np.array
        1D numpy array containing logical values. One value per voxel after
        mask has been applied. If `True`, the variance of the voxel's time
        course is above the low-variance cutoff (0.0001), and the voxel is
        included in the output array (`aryFunc`). If `False`, the variance
        of the voxel's time course is at or below the cutoff, and the voxel
        has been excluded from the output (`aryFunc`). This is to avoid
        problems in the subsequent model fitting. This array is necessary to
        put results into original dimensions after model fitting.
    aryFunc : np.array
        2D numpy array containing preprocessed functional data, of the form
        aryFunc[time, voxel].
    tplNiiShp : tuple
        Shape of input nii data. NOTE(review): this variable is reassigned
        inside the run loop to the 4D shape of the last functional run
        (x, y, z, time), so the returned tuple includes the temporal
        dimension — callers presumably use only the first three entries;
        verify against callers.

    Notes
    -----
    Functional data is loaded from disk. Temporal and spatial smoothing can
    be applied. The functional data is reshaped, into the form
    aryFunc[time, voxel]. A mask is applied (externally supplied, e.g. a
    grey matter mask). Subsequently, the functional data is de-meaned, and
    intensities are converted into z-scores.
    """
    print('------Load & preprocess nii data')

    # Load mask (to restrict model fitting):
    aryMask, hdrMsk, aryAff = load_nii(strPathNiiMask)

    # Mask is loaded as float32, but is better represented as integer:
    aryMask = np.array(aryMask).astype(np.int16)

    # Number of non-zero voxels in mask:
    # varNumVoxMsk = int(np.count_nonzero(aryMask))

    # Dimensions of nii data (here still the 3D shape of the mask; the
    # variable is overwritten with the 4D functional shape inside the loop
    # below):
    tplNiiShp = aryMask.shape

    # Total number of voxels:
    varNumVoxTlt = (tplNiiShp[0] * tplNiiShp[1] * tplNiiShp[2])

    # Reshape mask (flatten):
    vecMaskFlt = np.reshape(aryMask, varNumVoxTlt)

    # Boolean mask (True for voxels with a mask value greater than zero):
    vecLgcMsk = np.greater(vecMaskFlt.astype(np.int16),
                           np.array([0], dtype=np.int16)[0])

    # List for arrays with functional data (possibly several runs):
    lstFunc = []

    # Number of runs:
    varNumRun = len(lstPathNiiFunc)

    # Loop through runs and load data:
    for idxRun in range(varNumRun):

        print(('---------Preprocess run ' + str(idxRun + 1)))

        # Load 4D nii data:
        aryTmpFunc, _, _ = load_nii(lstPathNiiFunc[idxRun])

        # Dimensions of nii data (including temporal dimension; spatial
        # dimensions need to be the same for mask & functional data):
        tplNiiShp = aryTmpFunc.shape

        # Preprocessing of nii data (trend removal and smoothing, performed
        # in parallel over `varPar` processes):
        aryTmpFunc = pre_pro_par(aryTmpFunc, aryMask=aryMask,
                                 lgcLinTrnd=lgcLinTrnd,
                                 varSdSmthTmp=varSdSmthTmp,
                                 varSdSmthSpt=varSdSmthSpt,
                                 varPar=varPar)

        # Reshape functional nii data, from now on of the form
        # aryTmpFunc[time, voxel]:
        aryTmpFunc = np.reshape(aryTmpFunc, [varNumVoxTlt, tplNiiShp[3]]).T

        # Apply mask (keep only voxels inside the external mask):
        aryTmpFunc = aryTmpFunc[:, vecLgcMsk]

        # De-mean functional data (subtract the temporal mean per voxel):
        aryTmpFunc = np.subtract(
            aryTmpFunc,
            np.mean(aryTmpFunc, axis=0, dtype=np.float32)[None, :])

        # Convert intensities into z-scores. If there are several pRF runs,
        # these are concatenated. Z-scoring ensures that differences in mean
        # image intensity and/or variance between runs do not confound the
        # analysis. Possible enhancement: Explicitly model across-runs
        # variance with a nuisance regressor in the GLM.
        aryTmpStd = np.std(aryTmpFunc, axis=0)

        # In order to avoid division by zero, only divide those voxels with
        # a standard deviation greater than zero:
        aryTmpLgc = np.greater(aryTmpStd.astype(np.float32),
                               np.array([0.0], dtype=np.float32)[0])

        # Z-scoring (data are already de-meaned, so dividing by the standard
        # deviation completes the z-transform):
        aryTmpFunc[:, aryTmpLgc] = np.divide(aryTmpFunc[:, aryTmpLgc],
                                             aryTmpStd[None, aryTmpLgc])

        # Set voxels with a variance of zero to intensity zero (aryTmpLgc is
        # inverted here and reused as the zero-variance selection):
        aryTmpLgc = np.not_equal(aryTmpLgc, True)
        aryTmpFunc[:, aryTmpLgc] = np.array([0.0], dtype=np.float32)[0]

        # Put preprocessed functional data of current run into list:
        lstFunc.append(aryTmpFunc)
        del (aryTmpFunc)

    # Put functional data from separate runs into one array. 2D array of the
    # form aryFunc[time, voxel]
    aryFunc = np.concatenate(lstFunc, axis=0).astype(np.float32, copy=False)
    del (lstFunc)

    # Voxels that are outside the brain and have no, or very little, signal
    # should not be included in the pRF model finding. We take the variance
    # over time and exclude voxels with a suspiciously low variance. Because
    # the data given into the cython or GPU function has float32 precision,
    # we calculate the variance on data with float32 precision.
    aryFuncVar = np.var(aryFunc, axis=0, dtype=np.float32)

    # Is the variance greater than the cutoff (0.0001)?
    vecLgcVar = np.greater(aryFuncVar,
                           np.array([0.0001]).astype(np.float32)[0])

    # Array with functional data for which conditions (mask inclusion and
    # cutoff value) are fulfilled:
    aryFunc = aryFunc[:, vecLgcVar]

    return vecLgcMsk, hdrMsk, aryAff, vecLgcVar, aryFunc, tplNiiShp