def test_onesample(self):
    """Smoke-test one-sample permutation calibration for rfx and mfx stats.

    Builds synthetic data via make_data(), then runs P.calibrate with
    cluster definitions (height threshold, max diameter) and a region
    label vector.
    """
    data, vardata, XYZ = make_data()
    # rfx calibration
    P = PT.permutation_test_onesample(data, XYZ, ndraws=ndraws)
    # Cluster definitions: (height threshold, max diameter).
    # int() is required: P.ndraws * 0.95 is a float, and numpy arrays
    # cannot be indexed with floats.
    c = [(P.random_Tvalues[int(P.ndraws * 0.95)], None),
         (P.random_Tvalues[int(P.ndraws * 0.5)], 10)]
    # Region labels: first half labeled 1, second half labeled 10.
    # Floor division keeps the slice bound an int under Python 3.
    r = np.ones(data.shape[1], int)
    r[data.shape[1] // 2:] *= 10
    #p_values, cluster_results, region_results = P.calibrate(nperms=100, clusters=c, regions=[r])
    # mfx calibration
    P = PT.permutation_test_onesample(data, XYZ, vardata=vardata,
                                      stat_id="student_mfx", ndraws=ndraws)
    p_values, cluster_results, region_results = \
        P.calibrate(nperms=nperms, clusters=c, regions=[r])
def onesample_test(data_images, vardata_images, mask_images, stat_id,
                   permutations=0, cluster_forming_th=0.01):
    """
    Helper function for permutation-based mass univariate onesample
    group analysis.
    """
    # Extract arrays from the input images
    data, vardata, xyz, mask = prepare_arrays(data_images, vardata_images,
                                              mask_images)

    # Build the one-sample permutation test object
    ptest = permutation_test_onesample(data, xyz, vardata=vardata,
                                       stat_id=stat_id)

    # z-map image: write z-scores at the in-mask voxel coordinates
    zmap = np.zeros(data_images[0].get_shape()).squeeze()
    zmap[list(xyz)] = ptest.zscore()
    zimg = Image(zmap, data_images[0].get_affine())

    # Mask image
    maskimg = Image(mask, data_images[0].get_affine())

    # No multiple-comparisons calibration requested: done.
    if permutations <= 0:
        return zimg, maskimg

    # Cluster definition: (threshold, diameter)
    cluster_def = (ptest.height_threshold(cluster_forming_th), None)

    # Calibration
    voxel_res, cluster_res, region_res = \
        ptest.calibrate(nperms=permutations, clusters=[cluster_def])

    # Null distributions: cluster sizes (s), max cluster size (smax)
    # and max z-score (zmax)
    nulls = {
        'zmax': ptest.zscore(voxel_res['perm_maxT_values']),
        's': cluster_res[0]['perm_size_values'],
        'smax': cluster_res[0]['perm_maxsize_values'],
    }

    # Return z-map image, mask image and dictionary of null distributions
    return zimg, maskimg, nulls
XYZ = np.array(np.where(mask==0)) p = XYZ.shape[1] data = np.random.randn(n,p) I = np.where(np.square(XYZ - XYZ.max(axis=1).reshape(-1,1)/2).sum(axis=0) <= r**2 )[0] data[:, I] += signal vardata = np.random.randn(n,p)**2 if axis==1: data = data.transpose() vardata = vardata.transpose() return data, vardata, XYZ ################################################################################ # Example for using permutation_test_onesample class data, vardata, XYZ = make_data() # rfx calibration P = PT.permutation_test_onesample(data,XYZ) # clusters definition (height threshold, max diameter) c = [(P.random_Tvalues[P.ndraws * (0.95)], None), (P.random_Tvalues[P.ndraws * (0.5)], 10)] # regions definition (label vector) r = np.ones(data.shape[1], int) r[data.shape[1]/2.:] *= 10 voxel_results, cluster_results, region_results = \ P.calibrate(nperms=100, clusters=c, regions=[r]) # mfx calibration P = PT.permutation_test_onesample(data, XYZ, vardata=vardata, stat_id="student_mfx") voxel_results, cluster_results, region_results = \ P.calibrate(nperms=100, clusters=c, regions=[r]) ################################################################################
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Example: one-sample Wilcoxon permutation test with cluster calibration."""
import numpy as np

from nipy.neurospin.group.permutation_test import permutation_test_onesample

# Get group data
f = np.load('data/offset_002.npz')
data, vardata, xyz = f['mat'], f['var'], f['xyz']

# Create one-sample permutation test instance
ptest = permutation_test_onesample(data, xyz, stat_id='wilcoxon')

# Cluster definition: (threshold, diameter)
# Note that a list of definitions can be passed to ptest.calibrate
cluster_def = (ptest.height_threshold(0.01), None)
# Parenthesized print works under both Python 2 and Python 3
# (the original `print cluster_def` statement is a syntax error in 3.x).
print(cluster_def)

# Multiple-comparisons calibration.
# To get accurate p-values, don't pass nperms (default is 1e4);
# it will take longer to run.
voxel_res, cluster_res, region_res = \
    ptest.calibrate(nperms=100, clusters=[cluster_def])

# Simulated Zmax values for FWER correction
simu_zmax = ptest.zscore(voxel_res['perm_maxT_values'])

# Output regions
clusters = cluster_res[0]
## This is a list because several cluster definitions can be accepted
sizes = clusters['size_values']
clusters_Pcorr = clusters['size_Corr_p_values']