import numpy as np
from numpy.testing import assert_almost_equal
# apply_affine and inverse_affine come from the module under test


def test_apply_affine():
    XYZ = (100 * (np.random.rand(10, 11, 12, 3) - .5)).astype('int')
    T = np.eye(4)
    T[0:3, 0:3] = np.random.rand(3, 3)
    T[0:3, 3] = 100 * (np.random.rand(3) - .5)
    _XYZ = apply_affine(inverse_affine(T), apply_affine(T, XYZ))
    assert_almost_equal(_XYZ, XYZ)
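
# For reference, a minimal sketch of the two helpers the test exercises,
# assuming the usual homogeneous-coordinate semantics; these are illustrative
# stand-ins, not the project's actual apply_affine/inverse_affine:


def _apply_affine_sketch(T, xyz):
    # xyz has shape (..., 3): apply the 3x3 linear part, then translate
    return np.dot(xyz, T[0:3, 0:3].T) + T[0:3, 3]


def _inverse_affine_sketch(T):
    # plain 4x4 inverse; valid whenever the linear part is invertible
    return np.linalg.inv(T)
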
def __init__(self, image, control_points, sigma, grid_coords=False,
             affine=None):
    """
    control_points: an (nx, ny, nz, 3) array of world coordinates
    (the code below indexes it as a 4D control-point grid).

    If `grid_coords` is True, both `control_points` and `sigma` are
    interpreted in voxel coordinates.
    """
    nparams = np.prod(control_points.shape)
    self._generic_init(image, affine, nparams)
    fromworld = inverse_affine(self._toworld)
    if grid_coords:
        self._control_points = apply_affine(self._toworld, control_points)
        tmp = control_points
    else:
        self._control_points = np.asarray(control_points)
        tmp = apply_affine(fromworld, control_points)
    # TODO: make sure the control point indices fall within the
    # subgrid and maybe raise a warning if rounding is too severe
    tmp = np.round(tmp).astype('int')
    self._idx_control_points = tuple([tmp[:, :, :, i]
                                      for i in range(tmp.shape[3])])
    self._sigma = sigma * np.ones(3)
    self._grid_sigma = np.abs(np.diagonal(fromworld)[0:-1] * sigma)
    self._norma = np.prod(np.sqrt(2 * np.pi) * self._grid_sigma)
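
# Hedged standalone sketch of the world-to-voxel conversion the constructor
# performs (the diagonal `toworld` map below is an assumption for
# illustration):

toworld = np.diag([2., 2., 2., 1.])        # assumed voxel-to-world map
fromworld = np.linalg.inv(toworld)         # world-to-voxel map
cp_world = np.mgrid[0:40:10, 0:40:10, 0:40:10]
cp_world = cp_world.transpose(1, 2, 3, 0).astype(float)   # (nx, ny, nz, 3)
cp_voxel = np.dot(cp_world, fromworld[0:3, 0:3].T) + fromworld[0:3, 3]
idx = np.round(cp_voxel).astype('int')     # same rounding step as above
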
def eval(self, T):
    if isinstance(T, GridTransform):
        # TODO: make sure T.shape matches self._source_image.shape
        affine = 0
        Tv = apply_affine(self._target_fromworld, T[self._slices])
    else:
        affine = 1
        Tv = np.dot(self._target_fromworld, np.dot(T, self._source_toworld))
    seed = self._interp
    if self._interp < 0:
        # Python 3: sys.maxsize replaces the removed `maxint`
        # (requires `import sys` at module level)
        seed = -np.random.randint(sys.maxsize)
    _joint_histogram(self._joint_hist,
                     self._source.flat,  # array iterator
                     self._target,
                     Tv,
                     affine,
                     seed)
    #self.source_hist = np.sum(self._joint_hist, 1)
    #self.target_hist = np.sum(self._joint_hist, 0)
    return _similarity(self._joint_hist,
                       self._source_hist,
                       self._target_hist,
                       self._similarity,
                       self._pdf,
                       self._similarity_func)
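
# _joint_histogram and _similarity are compiled extension routines; as a
# readable stand-in, here is a pure-NumPy sketch of the same idea (joint
# intensity histogram -> mutual information), written from first principles
# rather than taken from the extension's source:


def mutual_information_sketch(src, tgt, bins=32):
    # joint histogram of paired intensities, normalized to a joint pdf
    H, _, _ = np.histogram2d(src.ravel(), tgt.ravel(), bins=bins)
    pxy = H / H.sum()
    px = pxy.sum(axis=1)
    py = pxy.sum(axis=0)
    nz = pxy > 0
    # MI = sum over nonzero bins of pxy * log(pxy / (px * py))
    return np.sum(pxy[nz] * np.log(pxy[nz] / np.outer(px, py)[nz]))
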
def _sample_affine(self):
    if self._sampled is None:
        self._sampled = apply_affine(self._grid_affine, self.IJK())
    else:
        self._sampled[:] = apply_affine(self._grid_affine, self.IJK())
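
# Why the two branches differ: the first call allocates the cache, while
# later calls write into it with slice assignment so any existing views of
# self._sampled stay valid. A self-contained illustration of the pattern:

buf = None
for _ in range(3):
    new = np.random.rand(5, 3)
    if buf is None:
        buf = new.copy()    # first pass: allocate the buffer
    else:
        buf[:] = new        # later passes: reuse the same memory
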
def __call__(self, xyz):
    return apply_affine(self.__array__(), xyz)
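
# Usage sketch: the object exposes its 4x4 matrix through __array__, so
# calling an instance maps points directly. Shown here with a plain array
# standing in for the transform object (an illustrative assumption):

T = np.eye(4)
T[0:3, 3] = [1., 2., 3.]
xyz = np.zeros((5, 3))
mapped = np.dot(xyz, T[0:3, 0:3].T) + T[0:3, 3]  # what __call__ delegates to
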
def grid_coords(xyz, affine, from_world, to_world):
    Tv = np.dot(from_world, np.dot(affine, to_world))
    XYZ = apply_affine(Tv, xyz)
    return XYZ[:, 0], XYZ[:, 1], XYZ[:, 2]
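
# Usage sketch: compose a world-space transform with the voxel/world maps
# and read off per-axis grid coordinates (the matrices below are
# illustrative assumptions, with source and target on the same grid):

to_world = np.diag([2., 2., 2., 1.])   # voxel -> world
from_world = np.linalg.inv(to_world)   # world -> voxel
T = np.eye(4)                          # identity world-space registration
pts = 20 * np.random.rand(10, 3)
X, Y, Z = grid_coords(pts, T, from_world, to_world)
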
def cluster_stats(zimg, mask, height_th, height_control='fpr',
                  cluster_th=0, nulls=None):
    """
    Return a tuple (clusters, info), where `clusters` is a list of
    clusters, each cluster being represented by a dictionary. Clusters
    are sorted by descending size order. Within each cluster, local
    maxima are sorted by descending depth order.

    Parameters
    ----------
    zimg: z-score image
    mask: mask image
    height_th: cluster forming threshold
    height_control: string
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|'none'
    cluster_th: cluster size threshold
    nulls: dictionary
        cluster-level calibration method: None|'rft'|array

    Notes
    -----
    This works only with three-dimensional data.
    """
    # Copy to avoid the mutable-default pitfall and mutating the caller's dict
    nulls = {} if nulls is None else dict(nulls)

    # Masking
    if len(mask.get_shape()) > 3:
        xyz = np.where((mask.get_data() > 0).squeeze())
        zmap = zimg.get_data().squeeze()[xyz]
    else:
        xyz = np.where(mask.get_data() > 0)
        zmap = zimg.get_data()[xyz]
    xyz = np.array(xyz).T
    nvoxels = np.size(xyz, 0)

    # Thresholding
    if height_control == 'fpr':
        zth = sp_stats.norm.isf(height_th)
    elif height_control == 'fdr':
        zth = emp_null.FDR(zmap).threshold(height_th)
    elif height_control == 'bonferroni':
        zth = sp_stats.norm.isf(height_th / nvoxels)
    else:  # Brute-force thresholding
        zth = height_th
    pth = sp_stats.norm.sf(zth)
    above_th = zmap > zth
    if len(np.where(above_th)[0]) == 0:
        return None, None  # FIXME
    zmap_th = zmap[above_th]
    xyz_th = xyz[above_th, :]

    # Clustering
    # Extract local maxima and connected components above the threshold
    ff = Field(np.size(zmap_th), field=zmap_th)
    ff.from_3d_grid(xyz_th, k=18)
    maxima, depth = ff.get_local_maxima(th=zth)
    labels = ff.cc()

    # Make a list of clusters, each cluster being a dictionary
    clusters = []
    for k in range(labels.max() + 1):
        s = np.sum(labels == k)
        if s >= cluster_th:
            in_cluster = labels[maxima] == k
            m = maxima[in_cluster]
            d = depth[in_cluster]
            order = d.argsort()[::-1]  # renamed: `sorted` shadowed the builtin
            clusters.append({'size': s, 'maxima': m[order], 'depth': d[order]})

    # Sort clusters by descending size order (Python 3: key, not cmp)
    clusters.sort(key=lambda c: c['size'], reverse=True)

    # FDR-corrected p-values
    fdr_pvalue = emp_null.FDR(zmap).all_fdr()[above_th]

    # Default "nulls" (Python 3: dict.has_key is gone)
    nulls.setdefault('zmax', 'bonferroni')
    nulls.setdefault('smax', None)
    nulls.setdefault('s', None)

    # Report significance levels in each cluster
    for c in clusters:
        maxima = c['maxima']
        zscore = zmap_th[maxima]
        pval = sp_stats.norm.sf(zscore)
        # Replace array indices with real coordinates
        c['maxima'] = apply_affine(zimg.get_affine(), xyz_th[maxima])
        c['zscore'] = zscore
        c['pvalue'] = pval
        c['fdr_pvalue'] = fdr_pvalue[maxima]
        # Voxel-level corrected p-values
        p = None
        if nulls['zmax'] == 'bonferroni':
            p = bonferroni(pval, nvoxels)
        elif isinstance(nulls['zmax'], np.ndarray):
            p = simulated_pvalue(zscore, nulls['zmax'])
        c['fwer_pvalue'] = p
        # Cluster-level p-values (corrected)
        p = None
        if isinstance(nulls['smax'], np.ndarray):
            p = simulated_pvalue(c['size'], nulls['smax'])
        c['cluster_fwer_pvalue'] = p
        # Cluster-level p-values (uncorrected)
        p = None
        if isinstance(nulls['s'], np.ndarray):
            p = simulated_pvalue(c['size'], nulls['s'])
        c['cluster_pvalue'] = p

    # General info
    info = {'nvoxels': nvoxels,
            'threshold_z': zth,
            'threshold_p': pth,
            'threshold_pcorr': bonferroni(pth, nvoxels)}

    return clusters, info
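
# Usage sketch for cluster_stats, to be run inside the module that defines
# it, and assuming a nibabel version whose images still expose
# get_data()/get_shape()/get_affine() as the function requires (the
# synthetic data below is purely illustrative):

import nibabel as nib

shape = (16, 16, 16)
z = np.random.randn(*shape)
z[4:8, 4:8, 4:8] += 5.0                          # plant a strong blob
zimg = nib.Nifti1Image(z, np.eye(4))
mask = nib.Nifti1Image(np.ones(shape, dtype=np.int8), np.eye(4))
clusters, info = cluster_stats(zimg, mask, height_th=0.001,
                               height_control='fpr', cluster_th=5)
if clusters is not None:
    for c in clusters:
        print(c['size'], c['zscore'].max(), c['fwer_pvalue'])
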