def gen_hardi(self, snr=20):
    """Simulate a 2-D HARDI signal grid from this phantom's fiber curves.

    For every voxel, the local fiber direction of each curve is read from
    ``c['dirs']``.  Voxels crossed by no fiber get a single isotropic
    tensor; all others get one tensor per curve whose principal eigenvalue
    grows with the local fiber strength.

    Args:
        snr: signal-to-noise ratio forwarded to dipy's ``multi_tensor``
            (default 20).

    Returns:
        tuple: ``(gtab, S_data)`` — the gradient table used and the
        simulated signal of shape ``(self.res, self.res, l_labels)``.
    """
    bval = 3000
    sph = load_sphere(refinement=2)
    gtab = GradientTable(bval * sph.v.T, b0_threshold=0)
    l_labels = gtab.bvecs.shape[0]
    # baseline eigenvalue for the diffusion tensors
    val_base = 1e-6 * 300
    S_data = np.zeros((self.res, self.res, l_labels), order='C')
    for x, y in itertools.product(range(self.res), repeat=2):
        # per-curve fiber strength at this voxel
        # (the original also computed an unused voxel midpoint here)
        norms = [np.sum(c['dirs'][x, y, :]**2)**0.5 for c in self.curves]
        if sum(norms) < 1e-6:
            # background voxel: isotropic diffusion, direction is arbitrary
            mevals = np.array([[val_base, val_base, val_base]])
            sticks = np.array([[1, 0, 0]])
            fracs = [100]
        else:
            # volume fractions proportional to fiber strength (sum to 100)
            fracs = 100.0 * np.array(norms) / sum(norms)
            # principal eigenvalue scales with local fiber density
            mevals = np.array([
                [(1.0 + norm * 4.0) * val_base, val_base, val_base]
                for norm in norms
            ])
            # unit in-plane stick per curve; degenerate curves fall back
            # to a fixed direction
            sticks = np.array([
                np.array([c['dirs'][x, y, 0], c['dirs'][x, y, 1], 0]) / norm
                if norm > 1e-6 else np.array([1, 0, 0])
                for c, norm in zip(self.curves, norms)
            ])
        signal, _ = multi_tensor(gtab, mevals, S0=1., angles=sticks,
                                 fractions=fracs, snr=snr)
        S_data[x, y, :] = signal
    return gtab, S_data
def print_entropy(output_dir):
    """Print mean ODF entropies for ground truth, noisy data and the
    reconstruction, split into foreground and background voxels.

    Foreground voxels are selected by the module-level ``crossmask``.

    Args:
        output_dir: directory containing ``gtab.pickle`` and
            ``result_raw.pickle`` from a previous run.
    """
    gtab_file = os.path.join(output_dir, 'gtab.pickle')
    result_file = os.path.join(output_dir, 'result_raw.pickle')
    # use context managers so the file handles are closed deterministically
    # (the original `pickle.load(open(...))` leaked them)
    with open(gtab_file, 'rb') as f:
        gtab = pickle.load(f)
    with open(result_file, 'rb') as f:
        result = pickle.load(f)[0]
    b_vecs = gtab.bvecs[gtab.bvals > 0, ...]
    b_sph = load_sphere(vecs=b_vecs.T)
    f_gt, f_noisy = reconst_f(output_dir, b_sph)
    # flatten spatial dims: one column per voxel
    upd = result['u1'].reshape(f_gt.shape[0], -1)
    # NOTE(review): 'crossmask' is a module-level array marking foreground
    # voxels — defined elsewhere in this file; verify it matches the image
    mask = crossmask.reshape(upd.shape[1:])
    f_gt_fg = f_gt[:, mask]
    f_noisy_fg = f_noisy[:, mask]
    upd_fg = upd[:, mask]
    print("  Ground truth (fg): %.3f" % np.mean(entropy(f_gt_fg, b_sph.b).ravel()))
    print("         Noisy (fg): %.3f" % np.mean(entropy(f_noisy_fg, b_sph.b).ravel()))
    print("Reconstruction (fg): %.3f" % np.mean(entropy(upd_fg, b_sph.b).ravel()))
    print()
    f_gt_bg = f_gt[:, np.logical_not(mask)]
    f_noisy_bg = f_noisy[:, np.logical_not(mask)]
    upd_bg = upd[:, np.logical_not(mask)]
    print("  Ground truth (bg): %.4f" % np.mean(entropy(f_gt_bg, b_sph.b).ravel()))
    print("         Noisy (bg): %.4f" % np.mean(entropy(f_noisy_bg, b_sph.b).ravel()))
    print("Reconstruction (bg): %.4f" % np.mean(entropy(upd_bg, b_sph.b).ravel()))
def reconst_f(output_dir, b_sph=None):
    """Fit a CSA-ODF model to the pickled data of a previous run.

    Args:
        output_dir: directory containing ``params.pickle`` and
            ``data.pickle``.
        b_sph: sphere matching the non-b0 gradient directions; loaded
            from the b-vectors when None.

    Returns:
        tuple of arrays, one per dataset (the raw data and, if present,
        the ground truth); each is a normalized ODF of shape
        ``(l_labels, n_voxels)`` in C order.
    """
    params_file = os.path.join(output_dir, 'params.pickle')
    data_file = os.path.join(output_dir, 'data.pickle')
    # use context managers so the file handles are closed deterministically
    # (the original `pickle.load(open(...))` leaked them)
    with open(params_file, 'rb') as f:
        baseparams = pickle.load(f)
    with open(data_file, 'rb') as f:
        data = pickle.load(f)
    gtab = data.gtab
    S_data = data.raw[data.slice]
    S_data_list = [S_data]
    if hasattr(data, 'ground_truth'):
        S_data_list.append(data.ground_truth[data.slice])
    # only non-b0 measurements carry directional information
    l_labels = np.sum(gtab.bvals > 0)
    imagedims = S_data.shape[:-1]
    b_vecs = gtab.bvecs[gtab.bvals > 0, ...]
    if b_sph is None:
        b_sph = load_sphere(vecs=b_vecs.T)
    qball_sphere = dipy.core.sphere.Sphere(xyz=b_vecs)
    basemodel = CsaOdfModel(gtab, **baseparams['base'])
    fs = []
    for S in S_data_list:
        f = basemodel.fit(S).odf(qball_sphere)
        # clip negative ODF values to zero (per-voxel maximum as upper bound)
        f = np.clip(f, 0, np.max(f, -1)[..., None])
        f = np.array(f.reshape(-1, l_labels).T, order='C')
        normalize_odf(f, b_sph.b)
        fs.append(f)
    return tuple(fs)
def synth_unimodals_linear(bval=3000, imagedims=(12, )):
    """Synthesize noise-free unimodal signals with a linearly varying angle.

    The fiber orientation sweeps from -45 to +45 degrees across the image.

    Args:
        bval: b-value used to build the gradient table.
        imagedims: spatial dimensions of the synthetic image.

    Returns:
        tuple: ``(S_data_orig, S_data, gtab)`` — ground truth, an identical
        copy (no noise is added here), and the gradient table.
    """
    # (removed unused local `d_image = len(imagedims)`)
    n_image = np.prod(imagedims)
    sph = load_sphere(refinement=2)
    l_labels = sph.mdims['l_labels']
    gtab = GradientTable(bval * sph.v.T, b0_threshold=0)
    S_data_orig = np.stack([
        one_fiber_signal(gtab, r, snr=None, eval_factor=15)
        for r in np.linspace(-45, 45, n_image)
    ]).reshape(imagedims + (l_labels, ))
    return S_data_orig, S_data_orig.copy(), gtab
def synth_unimodals(bval=3000, imagedims=(8, ), jiggle=10, snr=None):
    """Synthesize unimodal signals around a fixed orientation with jitter.

    The ground truth uses angle 0 everywhere; the noisy data perturbs each
    voxel's angle by Gaussian jitter of standard deviation ``jiggle``.

    Args:
        bval: b-value used to build the gradient table.
        imagedims: spatial dimensions of the synthetic image.
        jiggle: standard deviation (degrees) of the per-voxel angle jitter.
        snr: signal-to-noise ratio for the jittered data; None for
            noise-free signals.

    Returns:
        tuple: ``(S_data_orig, S_data, gtab)`` — ground truth, jittered
        data, and the gradient table.
    """
    # (removed unused local `d_image = len(imagedims)`)
    n_image = np.prod(imagedims)
    sph = load_sphere(refinement=2)
    l_labels = sph.mdims['l_labels']
    gtab = GradientTable(bval * sph.v.T, b0_threshold=0)
    S_data_orig = np.stack([one_fiber_signal(gtab, 0, snr=None)]*n_image) \
        .reshape(imagedims + (l_labels,))
    S_data = np.stack([
        one_fiber_signal(gtab, r, snr=snr)
        for r in jiggle * np.random.randn(n_image)
    ]).reshape(imagedims + (l_labels, ))
    return S_data_orig, S_data, gtab
def synth_bimodals(bval=3000, const_width=5, snr=None):
    """Synthesize a 1-D image: half bimodal (two-fiber) voxels, half uniform.

    The first ``const_width`` voxels carry a two-fiber signal at 0 and 70
    degrees; the remaining ``const_width`` voxels carry a uniform signal.

    Args:
        bval: b-value used to build the gradient table.
        const_width: number of voxels per homogeneous region.
        snr: signal-to-noise ratio for the returned noisy data; None keeps
            the data noise-free.

    Returns:
        tuple: ``(S_data_orig, S_data, gtab)`` — noise-free ground truth,
        (possibly) noisy data, and the gradient table.
    """
    imagedims = (const_width * 2, )
    # (removed unused locals `d_image` and `n_image`)
    sph = load_sphere(refinement=2)
    l_labels = sph.mdims['l_labels']
    gtab = GradientTable(bval * sph.v.T, b0_threshold=0)
    S_data = np.stack(
        [two_fiber_signal(gtab, [0, 70], snr=None)] * const_width
        + [uniform_signal(gtab, snr=None)] * const_width
    ).reshape(imagedims + (l_labels, ))
    S_data_orig = S_data.copy()
    if snr is not None:
        S_data[:] = add_noise(S_data_orig, snr=snr)
    return S_data_orig, S_data, gtab
v_diff1 = verts[k] - v v_diff2 = verts[k] + v if np.einsum('n,n->', v_diff1, v_diff1) > cutoff**2 \ and np.einsum('n,n->', v_diff2, v_diff2) > cutoff**2: vox[k] = 0 fin = np.zeros((l_labels, const_width * len(voxels)), order='C') for i, vox in enumerate(voxels): i1 = i * const_width i2 = (i + 1) * const_width fin[:, i1:i2] = np.tile(vox, (const_width, 1)).T normalize_odf(fin, sphere_vol) return fin mf = load_sphere(refinement=4) qball_sphere = dipy.core.sphere.Sphere(xyz=mf.v.T, faces=mf.faces.T) logging.info("Data generation...") x = list(range(0, 185, 5)) fin = synth_unimodal_odfs(qball_sphere, mf.b, [ 0, ] + x, const_width=1, tightness=30, cutoff=0.15) logging.info("Compute/load distances...")
def load_b_sph(output_dir):
    """Load the sphere matching a run's non-b0 gradient directions.

    Args:
        output_dir: directory containing ``data.pickle``.

    Returns:
        The sphere returned by ``load_sphere`` over the b-vectors.
    """
    data_file = os.path.join(output_dir, 'data.pickle')
    # use a context manager so the file handle is closed deterministically
    # (the original `pickle.load(open(...))` leaked it)
    with open(data_file, 'rb') as f:
        data = pickle.load(f)
    gtab = data.gtab
    b_vecs = gtab.bvecs[gtab.bvals > 0, ...]
    return load_sphere(vecs=b_vecs.T)
def init_spheres(self):
    """Build the b-vector sphere and its dipy counterpart from the
    non-b0 gradient directions, storing them on the instance."""
    nonzero = self.gtab.bvals > 0
    directions = self.gtab.bvecs[nonzero, ...]
    self.b_sph = load_sphere(vecs=directions.T)
    self.dipy_sph = dipy.core.sphere.Sphere(xyz=directions)