def test_predict_dti():
    """DTI model predictions should round-trip the simulated signal,
    both for a whole-volume fit and for a fit restricted to a mask."""
    with nbtmp.InTemporaryDirectory() as tmp:
        # Synthesize a small DTI data-set inside the temporary directory.
        fbval = op.join(tmp, 'dti.bval')
        fbvec = op.join(tmp, 'dti.bvec')
        fdata = op.join(tmp, 'dti.nii.gz')
        make_dti_data(fbval, fbvec, fdata)

        def _vol(fname):
            # Helper: load a NIfTI file as a floating-point array.
            return nib.load(fname).get_fdata()

        gtab = dpg.gradient_table(fbval, fbvec)

        # Whole-volume fit: the prediction should reproduce the input.
        fit_files = dti.fit_dti(fdata, fbval, fbvec, out_dir=tmp)
        pred_path = dti.predict(fit_files['params'], gtab,
                                S0_file=fdata, out_dir=tmp)
        pred = _vol(pred_path)
        npt.assert_almost_equal(pred, _vol(fdata))

        # If you have a mask into the volume, you will predict only that
        # part of the volume:
        vol_mask = np.zeros(pred.shape[:3], dtype=bool)
        vol_mask[2:4, 2:4, 2:4] = 1
        fit_files = dti.fit_dti(fdata, fbval, fbvec, mask=vol_mask,
                                out_dir=tmp)
        pred_path = dti.predict(fit_files['params'], gtab,
                                S0_file=fdata, out_dir=tmp)
        pred = _vol(pred_path)
        npt.assert_almost_equal(pred[vol_mask], _vol(fdata)[vol_mask])
def test_predict_dti():
    """DTI model predictions should round-trip the simulated signal,
    both for a whole-volume fit and for a fit restricted to a mask.

    BUG FIX: ``get_data()`` is deprecated and removed in nibabel >= 5;
    use the supported ``get_fdata()`` accessor instead (consistent with
    the other tests in this file).
    """
    with nbtmp.InTemporaryDirectory() as tmpdir:
        fbval = op.join(tmpdir, 'dti.bval')
        fbvec = op.join(tmpdir, 'dti.bvec')
        fdata = op.join(tmpdir, 'dti.nii.gz')
        make_dti_data(fbval, fbvec, fdata)
        file_dict = dti.fit_dti(fdata, fbval, fbvec, out_dir=tmpdir)
        params_file = file_dict['params']
        gtab = dpg.gradient_table(fbval, fbvec)
        predict_fname = dti.predict(params_file, gtab, S0_file=fdata,
                                    out_dir=tmpdir)
        prediction = nib.load(predict_fname).get_fdata()
        npt.assert_almost_equal(prediction, nib.load(fdata).get_fdata())
        # If you have a mask into the volume, you will predict only that
        # part of the volume:
        mask = np.zeros(prediction.shape[:3], dtype=bool)
        mask[2:4, 2:4, 2:4] = 1
        file_dict = dti.fit_dti(fdata, fbval, fbvec, mask=mask,
                                out_dir=tmpdir)
        params_file = file_dict['params']
        predict_fname = dti.predict(params_file, gtab, S0_file=fdata,
                                    out_dir=tmpdir)
        prediction = nib.load(predict_fname).get_fdata()
        npt.assert_almost_equal(prediction[mask],
                                nib.load(fdata).get_fdata()[mask])
def test_dti_tracking():
    """Serial and parallel tracking engines agree on streamline shape,
    and on values for deterministic tracking."""
    fit_out = fit_dti(fdata, fbval, fbvec)
    params = fit_out['params']
    # Arguments shared by every call to track().
    shared = dict(max_angle=30., sphere=None, seed_mask=None,
                  seeds=seeds, stop_mask=None, step_size=0.5)
    for directions in ["det", "prob"]:
        # Reference run on the serial engine.
        sl_serial = track(params, directions, stop_threshold=0.2,
                          engine="serial", **shared)
        npt.assert_equal(sl_serial[0].shape[-1], 3)
        for engine in ["dask", "joblib"]:
            for backend in ["threading"]:
                sl_parallel = track(params, directions, stop_threshold=0,
                                    n_jobs=2, engine=engine,
                                    backend=backend, **shared)
                npt.assert_equal(sl_parallel[0].shape[-1], 3)
                if directions == 'det':
                    # Deterministic tracking must not depend on engine.
                    npt.assert_almost_equal(sl_parallel[0], sl_serial[0])
def test_dti_tracking():
    """Parallel (dask) tracking matches a single-job reference run."""
    fit_out = fit_dti(fdata, fbval, fbvec)
    params = fit_out['params']
    # Arguments shared by every call to track().
    shared = dict(max_angle=30., sphere=None, seed_mask=None,
                  seeds=seeds, stop_mask=None, stop_threshold=0.2,
                  step_size=0.5)
    for directions in ["det", "prob"]:
        # Reference run with a single job.
        sl_serial = track(params, directions, n_jobs=1, **shared)
        npt.assert_equal(sl_serial[0].shape[-1], 3)
        for engine in ["dask"]:
            for backend in ["threading"]:
                sl_parallel = track(params, directions, n_jobs=2,
                                    engine=engine, backend=backend,
                                    **shared)
                npt.assert_equal(sl_parallel[0].shape[-1], 3)
                if directions == 'det':
                    # Deterministic tracking must not depend on engine.
                    npt.assert_almost_equal(sl_parallel[0], sl_serial[0])
def test_fit_dti():
    """fit_dti accepts lists of data/bval/bvec files and writes every
    expected output file."""
    # Let's see whether we can pass a list of files for each one;
    # two copies of the same small data-set stand in for a multi-run input.
    fdata1, fbval1, fbvec1 = dpd.get_fnames('small_101D')
    fdata2, fbval2, fbvec2 = dpd.get_fnames('small_101D')
    with nbtmp.InTemporaryDirectory() as tmpdir:
        outputs = dti.fit_dti([fdata1, fdata2],
                              [fbval1, fbval2],
                              [fbvec1, fbvec2],
                              out_dir=tmpdir)
        # Every derived map must exist on disk.
        for fname in outputs.values():
            npt.assert_(op.exists(fname))
def test_dti_tracking():
    """Tracking from a single seed yields a usefully long streamline."""
    tracking_opts = dict(max_angle=30., sphere=None, seed_mask=None,
                         n_seeds=1, step_size=0.5)
    fit_out = fit_dti(fdata, fbval, fbvec)
    for directions in ["det", "prob"]:
        sl = track(fit_out['params'], directions, **tracking_opts)
        npt.assert_(len(sl[0]) > 10)
def test_dti_tracking():
    """Tracking from a single seed yields a usefully long streamline."""
    tracking_opts = dict(max_angle=30., sphere=None, seed_mask=None,
                         seeds=1, step_size=0.5)
    fit_out = fit_dti(fdata, fbval, fbvec)
    for directions in ["det", "prob"]:
        sl = track(fit_out['params'], directions, **tracking_opts)
        npt.assert_(len(sl[0]) > 10)
def main():
    """Fit DTI, register to the MNI template, and segment bundles from a
    pre-computed whole-brain tractogram, driven by ``config.json``.

    BUG FIXES: (1) each bundle's waypoint-ROI list repeated
    ``'_roi1'`` twice; the second entry must be ``'_roi2'`` to match the
    two-entry ``'rules'`` list. (2) A stray trailing ``\"\"\"`` opened an
    unterminated string literal and is removed.
    """
    with open('config.json') as config_json:
        config = json.load(config_json)
    data_file = str(config['data_file'])
    data_bval = str(config['data_bval'])
    data_bvec = str(config['data_bvec'])
    img = nib.load(data_file)
    print("Calculating DTI...")
    if not op.exists('./dti_FA.nii.gz'):
        dti_params = dti.fit_dti(data_file, data_bval, data_bvec,
                                 out_dir='.')
    else:
        # Reuse previously computed outputs instead of refitting.
        dti_params = {'FA': './dti_FA.nii.gz',
                      'params': './dti_params.nii.gz'}
    tg = nib.streamlines.load('csa_prob.trk').tractogram
    # Move the streamlines from world to voxel coordinates.
    streamlines = tg.apply_affine(np.linalg.inv(img.affine)).streamlines
    # Use only a small portion of the streamlines, for expedience:
    streamlines = streamlines[::100]
    templates = afd.read_templates()
    bundle_names = ["CST", "ILF"]
    bundles = {}
    for name in bundle_names:
        for hemi in ['_R', '_L']:
            bundles[name + hemi] = {
                'ROIs': [
                    templates[name + '_roi1' + hemi],
                    # Second waypoint ROI (was erroneously roi1 again).
                    templates[name + '_roi2' + hemi]
                ],
                'rules': [True, True]
            }
    print("Registering to template...")
    MNI_T2_img = dpd.read_mni_template()
    bvals, bvecs = read_bvals_bvecs(data_bval, data_bvec)
    gtab = gradient_table(bvals, bvecs, b0_threshold=100)
    mapping = reg.syn_register_dwi(data_file, gtab)
    reg.write_mapping(mapping, './mapping.nii.gz')
    print("Segmenting fiber groups...")
    fiber_groups = seg.segment(data_file, data_bval, data_bvec,
                               streamlines, bundles,
                               reg_template=MNI_T2_img,
                               mapping=mapping,
                               as_generator=False,
                               affine=img.affine)
def test_fit_dti():
    """fit_dti accepts per-run lists of data/bval/bvec files.

    BUG FIX: ``dipy.data.get_data`` was renamed to ``get_fnames`` in
    modern DIPY releases; this matches the other test_fit_dti in the
    suite.
    """
    # Let's see whether we can pass a list of files for each one:
    fdata1, fbval1, fbvec1 = dpd.get_fnames('small_101D')
    fdata2, fbval2, fbvec2 = dpd.get_fnames('small_101D')
    with nbtmp.InTemporaryDirectory() as tmpdir:
        file_dict = dti.fit_dti([fdata1, fdata2],
                                [fbval1, fbval2],
                                [fbvec1, fbvec2],
                                out_dir=tmpdir)
        # Every derived map must exist on disk.
        for f in file_dict.values():
            npt.assert_(op.exists(f))
def test_dti_tracking():
    """Streamlines respect the configured minimum length.

    NOTE: relies on module-level ``step_size`` and ``min_length``.
    """
    opts = dict(max_angle=30., sphere=None, seed_mask=None, n_seeds=1,
                step_size=step_size, min_length=min_length)
    fit_out = fit_dti(fdata, fbval, fbvec)
    for directions in ["det", "prob"]:
        tractogram = track(fit_out['params'], directions, **opts)
        first_sl = tractogram.streamlines[0]
        # A streamline shorter than min_length would have been filtered.
        npt.assert_(len(first_sl) >= min_length * step_size)
def main():
    """Compute FA tract profiles for every ``.tck`` tractogram in the
    configured ``tracks`` directory and dump each as a JSON file under
    ``./profile/``.

    BUG FIX: ``get_data()`` is deprecated and removed in nibabel >= 5;
    use ``get_fdata()`` instead.
    """
    with open('config.json') as config_json:
        config = json.load(config_json)
    data_file = str(config['data_file'])
    data_bval = str(config['data_bval'])
    data_bvec = str(config['data_bvec'])
    tracks = str(config['tracks'])
    print("Calculating DTI...")
    if not os.path.exists('./dti_FA.nii.gz'):
        dti_params = dti.fit_dti(data_file, data_bval, data_bvec,
                                 out_dir='.')
    else:
        # Reuse cached outputs from a previous run.
        dti_params = {'FA': './dti_FA.nii.gz',
                      'params': './dti_params.nii.gz'}
    FA_img = nib.load(dti_params['FA'])
    FA_data = FA_img.get_fdata()
    print("Extracting tract profiles...")
    path = os.getcwd() + '/profile/'
    if not os.path.exists(path):
        os.makedirs(path)
    for t in os.listdir(tracks):
        if t.endswith('.tck'):
            tg = nib.streamlines.load(tracks + '/' + t)
            streamlines = list(tg.streamlines)
            profile = seg.calculate_tract_profile(FA_data, streamlines)
            profile = profile.tolist()
            # remove the .tck from string
            t = os.path.splitext(os.path.basename(t))[0]
            p = path + '/' + t + '.json'
            json.dump(profile,
                      codecs.open(p, 'w', encoding='utf-8'),
                      separators=(',', ':'), sort_keys=True, indent=4)
# Script prologue: fetch the Stanford HARDI data-set, fit DTI (or reuse
# cached maps) and compute a SyN registration to the MNI T2 template.
import AFQ.segmentation as seg
import AFQ.api as api

dpd.fetch_stanford_hardi()
hardi_dir = op.join(fetcher.dipy_home, "stanford_hardi")
hardi_fdata = op.join(hardi_dir, "HARDI150.nii.gz")
hardi_fbval = op.join(hardi_dir, "HARDI150.bval")
hardi_fbvec = op.join(hardi_dir, "HARDI150.bvec")
img = nib.load(hardi_fdata)
print("Calculating DTI...")
if not op.exists('./dti_FA.nii.gz'):
    dti_params = dti.fit_dti(hardi_fdata, hardi_fbval, hardi_fbvec,
                             out_dir='.')
else:
    # Outputs cached by an earlier run -- skip the (slow) fit.
    dti_params = {'FA': './dti_FA.nii.gz',
                  'params': './dti_params.nii.gz'}
FA_img = nib.load(dti_params['FA'])
FA_data = FA_img.get_fdata()
print("Registering to template...")
MNI_T2_img = afd.read_mni_template()
if not op.exists('mapping.nii.gz'):
    import dipy.core.gradients as dpg
    gtab = dpg.gradient_table(hardi_fbval, hardi_fbvec)
    # syn_register_dwi returns (warped image, diffeomorphic mapping).
    warped_hardi, mapping = reg.syn_register_dwi(hardi_fdata, gtab,
                                                 template=MNI_T2_img)
data = img.get_data() gtab = dpg.gradient_table(fbval, fbvec) mean_b0 = np.mean(data[..., gtab.b0s_mask], -1) print("Calculating brain-mask") if not op.exists('./brain_mask.nii.gz'): _, brain_mask = median_otsu(mean_b0, median_radius=4, numpass=4) nib.save(nib.Nifti1Image(brain_mask.astype(int), img.affine), './brain_mask.nii.gz') else: brain_mas = nib.load('./brain_mask.nii.gz').get_data().astype(bool) print("Calculating DTI...") if not op.exists('./dti_FA.nii.gz'): dti_params = dti.fit_dti(fdata, fbval, fbvec, out_dir='.', mask=brain_mask) else: dti_params = {'FA': './dti_FA.nii.gz', 'MD': './dti_MD.nii.gz', 'RD': './dti_RD.nii.gz', 'AD': './dti_AD.nii.gz', 'params': './dti_params.nii.gz'} print("Registering to template...") MNI_T2_img = dpd.read_mni_template() if not op.exists('mapping.nii.gz'): import dipy.core.gradients as dpg gtab = dpg.gradient_table(fbval, fbvec) mapping = reg.syn_register_dwi(fdata, gtab) reg.write_mapping(mapping, './mapping.nii.gz') else:
def main():
    """Segment a whole-brain tractogram into CST and ILF bundles and
    save each fiber group as a ``.tck`` file, driven by ``config.json``.

    BUG FIX: each bundle's waypoint-ROI list repeated ``'_roi1'`` twice;
    the second entry must be ``'_roi2'`` to match the two-entry
    ``'rules'`` list.
    """
    with open('config.json') as config_json:
        config = json.load(config_json)
    # Paths to data
    data_file = str(config['data_file'])
    data_bval = str(config['data_bval'])
    data_bvec = str(config['data_bvec'])
    img = nib.load(data_file)
    print('Loaded Data')
    print('Calculating DTI')
    if not op.exists('./dti_FA.nii.gz'):
        dti_params = dti.fit_dti(data_file, data_bval, data_bvec,
                                 out_dir='.')
    else:
        # Reuse previously computed outputs instead of refitting.
        dti_params = {'FA': './dti_FA.nii.gz',
                      'params': './dti_params.nii.gz'}
    tg = nib.streamlines.load(str(config['tck_data'])).tractogram
    # cannot remove inv, affine -- segmentation expects voxel coords.
    streamlines = tg.apply_affine(np.linalg.inv(img.affine)).streamlines
    print('Loaded streamlines')
    # Use only a small portion of the streamlines, for expedience:
    streamlines = streamlines[::100]
    templates = afd.read_templates()
    bundle_names = ["CST", "ILF"]
    bundles = {}
    for name in bundle_names:
        for hemi in ['_R', '_L']:
            # Second waypoint ROI was erroneously roi1 again; fixed.
            bundles[name + hemi] = {
                'ROIs': [templates[name + '_roi1' + hemi],
                         templates[name + '_roi2' + hemi]],
                'rules': [True, True]}
    print('Set Bundles')
    MNI_T2_img = dpd.read_mni_template()
    print("Registering to template...")
    bvals, bvecs = read_bvals_bvecs(data_bval, data_bvec)
    if not op.exists('mapping.nii.gz'):
        gtab = gradient_table(bvals, bvecs)
        mapping = reg.syn_register_dwi(data_file, gtab)
        reg.write_mapping(mapping, './mapping.nii.gz')
    else:
        mapping = reg.read_mapping('./mapping.nii.gz', img, MNI_T2_img)
    print("Segmenting fiber groups...")
    fiber_groups = seg.segment(data_file, data_bval, data_bvec,
                               streamlines, bundles,
                               reg_template=MNI_T2_img,
                               mapping=mapping,
                               as_generator=False,
                               affine=img.affine)
    path = os.getcwd() + '/tract1/'
    if not os.path.exists(path):
        os.makedirs(path)
    print('Creating tck files')
    for fg in fiber_groups:
        streamlines = fiber_groups[fg]
        fname = fg + ".tck"
        trg = nib.streamlines.Tractogram(streamlines,
                                         affine_to_rasmm=img.affine)
        nib.streamlines.save(trg, path + fname)
    print('Finished segment')
# Script prologue: fetch the Stanford HARDI data-set, fit DTI (or reuse
# cached maps), then track (or reload) whole-brain streamlines.
import AFQ.dti as dti
import AFQ.segmentation as seg

dpd.fetch_stanford_hardi()
hardi_dir = op.join(fetcher.dipy_home, "stanford_hardi")
hardi_fdata = op.join(hardi_dir, "HARDI150.nii.gz")
hardi_fbval = op.join(hardi_dir, "HARDI150.bval")
hardi_fbvec = op.join(hardi_dir, "HARDI150.bvec")
img = nib.load(hardi_fdata)
print("Calculating DTI...")
if not op.exists('./dti_FA.nii.gz'):
    dti_params = dti.fit_dti(hardi_fdata, hardi_fbval, hardi_fbvec,
                             out_dir='.')
else:
    # Outputs cached by an earlier run -- skip the (slow) fit.
    dti_params = {'FA': './dti_FA.nii.gz',
                  'params': './dti_params.nii.gz'}
print("Tracking...")
if not op.exists('dti_streamlines.trk'):
    streamlines = list(aft.track(dti_params['params']))
    aus.write_trk('./dti_streamlines.trk', streamlines,
                  affine=img.affine)
else:
    # Reload cached streamlines and move them from world (RAS mm) back
    # to voxel coordinates.
    # NOTE(review): assumes the .trk was written with this image's
    # affine -- confirm.
    tg = nib.streamlines.load('./dti_streamlines.trk').tractogram
    streamlines = tg.apply_affine(np.linalg.inv(img.affine)).streamlines
# Use only a small portion of the streamlines, for expedience:
streamlines = streamlines[::100]