Example #1
def test_all_models_dispersable():
    scheme = wu_minn_hcp_acquisition_scheme()

    dispersable_models = [
        [cylinder_models.C1Stick()],
        [cylinder_models.C2CylinderStejskalTannerApproximation()],
        [cylinder_models.C3CylinderCallaghanApproximation()],
        [cylinder_models.C4CylinderGaussianPhaseApproximation()],
        [gaussian_models.G1Ball(),
         gaussian_models.G2Zeppelin()], [gaussian_models.G3TemporalZeppelin()],
        [sphere_models.S1Dot(),
         gaussian_models.G2Zeppelin()],
        [
            sphere_models.S2SphereStejskalTannerApproximation(),
            gaussian_models.G2Zeppelin()
        ]
    ]

    spherical_distributions = [
        distribute_models.SD1WatsonDistributed,
        distribute_models.SD2BinghamDistributed
    ]

    for model in dispersable_models:
        for distribution in spherical_distributions:
            dist_mod = distribution(model)
            params = {}
            for param, card in dist_mod.parameter_cardinality.items():
                params[param] = np.random.rand(
                    card) * dist_mod.parameter_scales[param]
            assert_equal(isinstance(dist_mod(scheme, **params), np.ndarray),
                         True)
Example #2
def test_stick_tortuous_zeppelin():
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    stick_and_zeppelin = (
        modeling_framework.MultiCompartmentModel(
            models=[stick, zeppelin])
    )

    stick_and_zeppelin.set_tortuous_parameter(
        'G2Zeppelin_1_lambda_perp',
        'C1Stick_1_lambda_par',
        'partial_volume_0',
        'partial_volume_1'
    )
    stick_and_zeppelin.set_equal_parameter(
        'C1Stick_1_mu',
        'G2Zeppelin_1_mu'
    )

    stick_and_zeppelin.set_equal_parameter(
        'G2Zeppelin_1_lambda_par',
        'C1Stick_1_lambda_par'
    )

    fitted_params = (stick_and_zeppelin.fit(
        scheme,
        camino_parallel.signal_attenuation[::20],
    ).fitted_parameters)

    mean_abs_error = np.mean(
        abs(fitted_params['partial_volume_0'].squeeze(
        ) - camino_parallel.fractions[::20]))
    assert_equal(mean_abs_error < 0.02, True)
Example #3
def create_noddi_watson_model(lambda_iso_diff=3.e-9, lambda_par_diff=1.7e-9):
    """Creates NODDI mulit-compartment model with Watson distribution."""
    """
        Arguments:
            lambda_iso_diff: float
                isotropic diffusivity
            lambda_par_diff: float
                parallel diffusivity
        Returns: MultiCompartmentModel instance
            NODDI Watson multi-compartment model instance
    """
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])
    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                   'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                lambda_par_diff)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', lambda_iso_diff)

    return NODDI_mod
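A minimal usage sketch for the factory above; the `scheme` and `data` names are assumed placeholders for an acquisition scheme and a DWI array, not part of the original example:
# Sketch only: `scheme` and `data` are assumed to exist already.
noddi = create_noddi_watson_model(lambda_iso_diff=3.e-9, lambda_par_diff=1.7e-9)
print(noddi.parameter_names)  # inspect the remaining free parameters
# noddi_fit = noddi.fit(scheme, data)
# odi = noddi_fit.fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']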
def test_raise_mix_with_tortuosity_in_mcmodel():
    scheme = wu_minn_hcp_acquisition_scheme()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    mc = modeling_framework.MultiCompartmentModel([stick, zeppelin])
    mc.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                              'C1Stick_1_lambda_par', 'partial_volume_0',
                              'partial_volume_1')

    data = stick(scheme, lambda_par=1.7e-9, mu=[0., 0.])

    assert_raises(ValueError, mc.fit, scheme, data, solver='mix')
Example #5
def test_stick_and_tortuous_zeppelin_to_spherical_mean_fit():
    """ this is a more complex test to see if we can generate 3D data using a
    stick and zeppelin model, where we assume the perpendicular diffusivity is
    linked to the parallel diffusivity and volume fraction using tortuosity. We
    then use the spherical mean models of stick and zeppelin with the same
    tortuosity assumption to fit the 3D data (and estimating the spherical mean
    of each shell). The final check is whether the parallel diffusivity and
    volume fraction between the 3D and spherical mean models correspond."""

    gt_mu = np.clip(np.random.rand(2), .3, np.inf)
    gt_lambda_par = (np.random.rand() + 1.) * 1e-9
    gt_partial_volume = 0.3

    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    stick_and_zeppelin = (modeling_framework.MultiCompartmentModel(
        models=[stick, zeppelin]))

    stick_and_zeppelin.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                              'C1Stick_1_lambda_par',
                                              'partial_volume_0',
                                              'partial_volume_1')
    stick_and_zeppelin.set_equal_parameter('C1Stick_1_mu', 'G2Zeppelin_1_mu')

    stick_and_zeppelin.set_equal_parameter('C1Stick_1_lambda_par',
                                           'G2Zeppelin_1_lambda_par')

    gt_parameter_vector = (stick_and_zeppelin.parameters_to_parameter_vector(
        C1Stick_1_lambda_par=gt_lambda_par,
        C1Stick_1_mu=gt_mu,
        partial_volume_0=gt_partial_volume,
        partial_volume_1=1 - gt_partial_volume))

    E = stick_and_zeppelin.simulate_signal(scheme, gt_parameter_vector)

    # now we make the stick and zeppelin spherical mean model and check if the
    # same lambda_par and volume fraction result as the 3D generated data.
    stick_and_tortuous_zeppelin_sm = (
        modeling_framework.MultiCompartmentSphericalMeanModel(
            models=[stick, zeppelin]))

    stick_and_tortuous_zeppelin_sm.set_tortuous_parameter(
        'G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par', 'partial_volume_0',
        'partial_volume_1')
    stick_and_tortuous_zeppelin_sm.set_equal_parameter(
        'G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')

    res_sm = stick_and_tortuous_zeppelin_sm.fit(scheme,
                                                E).fitted_parameters_vector

    assert_array_almost_equal(np.r_[gt_lambda_par, gt_partial_volume],
                              res_sm.squeeze()[:-1], 2)
Example #6
def test_multi_tissue_tortuosity_no_s0():
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    ball = gaussian_models.G1Ball()

    model = modeling_framework.MultiCompartmentModel(
        models=[stick, zeppelin, ball])
    model.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                 'C1Stick_1_lambda_par', 'partial_volume_0',
                                 'partial_volume_1', True)
    tort = model.parameter_links[0][2]
    s0ic, s0ec = tort.S0_intra, tort.S0_extra
    assert_(s0ic == 1 and s0ec == 1)
Example #7
def test_equivalence_sh_distributed_mc_with_mcsh():
    """
    We test if we can input a Watson-distributed zeppelin and stick into an
    SD3SphericalHarmonicsDistributedModel in an MC-model, and compare it with
    an MCSH model with the same watson distribution as a kernel.
    """
    stick = cylinder_models.C1Stick()
    zep = gaussian_models.G2Zeppelin()

    mck_dist = distribute_models.SD1WatsonDistributed([stick, zep])
    mck_dist.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                 'C1Stick_1_lambda_par')
    mck_dist.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                    'G2Zeppelin_1_lambda_par',
                                    'partial_volume_0')

    mcsh = modeling_framework.MultiCompartmentSphericalHarmonicsModel(
        models=[mck_dist], sh_order=8)
    mc = modeling_framework.MultiCompartmentModel([
        distribute_models.SD3SphericalHarmonicsDistributed([mck_dist],
                                                           sh_order=8)
    ])

    lambda_par = 0.
    odi = .02
    sh_coeff = np.ones(45)
    sh_coeff[0] = 1 / (2 * np.sqrt(np.pi))
    pv0 = .3

    params_mcsh = {
        'SD1WatsonDistributed_1_partial_volume_0': pv0,
        'SD1WatsonDistributed_1_G2Zeppelin_1_lambda_par': lambda_par,
        'SD1WatsonDistributed_1_SD1Watson_1_odi': odi,
        'sh_coeff': sh_coeff
    }

    basemod = 'SD3SphericalHarmonicsDistributed_1_'
    params_mc = {
        basemod + 'SD1WatsonDistributed_1_partial_volume_0': pv0,
        basemod + 'SD1WatsonDistributed_1_G2Zeppelin_1_lambda_par': lambda_par,
        basemod + 'SD1WatsonDistributed_1_SD1Watson_1_odi': odi,
        basemod + 'SD3SphericalHarmonics_1_sh_coeff': sh_coeff
    }

    E_mcsh = mcsh.simulate_signal(scheme, params_mcsh)
    E_mc = mc.simulate_signal(scheme, params_mc)

    np.testing.assert_array_almost_equal(E_mcsh, E_mc)
Example #8
def test_estimate_spherical_mean_multi_shell(lambda_par=1.7e-9,
                                             lambda_perp=0.8e-9,
                                             mu=np.r_[0, 0]):
    zeppelin = gaussian_models.G2Zeppelin()
    zeppelin_smt = zeppelin.spherical_mean(scheme,
                                           lambda_par=lambda_par,
                                           lambda_perp=lambda_perp,
                                           mu=mu)
    zeppelin_multishell = zeppelin(scheme,
                                   lambda_par=lambda_par,
                                   lambda_perp=lambda_perp,
                                   mu=mu)

    smt_multi_shell = estimate_spherical_mean_multi_shell(
        zeppelin_multishell, scheme)
    assert_array_almost_equal(smt_multi_shell, zeppelin_smt)
Example #9
def test_MIX_fitting_multimodel():
    ball = gaussian_models.G1Ball()
    zeppelin = gaussian_models.G2Zeppelin()
    ball_and_zeppelin = (modeling_framework.MultiCompartmentModel(
        models=[ball, zeppelin]))

    parameter_vector = ball_and_zeppelin.parameters_to_parameter_vector(
        G1Ball_1_lambda_iso=2.7e-9,
        partial_volume_0=.2,
        partial_volume_1=.8,
        G2Zeppelin_1_lambda_perp=.5e-9,
        G2Zeppelin_1_mu=(np.pi / 2., np.pi / 2.),
        G2Zeppelin_1_lambda_par=1.7e-9)

    E = ball_and_zeppelin.simulate_signal(scheme, parameter_vector)
    fit = ball_and_zeppelin.fit(scheme, E,
                                solver='mix').fitted_parameters_vector
    assert_array_almost_equal(abs(fit).squeeze(), parameter_vector, 2)
Example #10
def test_bingham_dispersed_stick_tortuous_zeppelin():
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    bingham_bundle = distribute_models.SD2BinghamDistributed(
        models=[stick, zeppelin])

    bingham_bundle.set_tortuous_parameter(
        'G2Zeppelin_1_lambda_perp',
        'G2Zeppelin_1_lambda_par',
        'partial_volume_0'
    )

    bingham_bundle.set_equal_parameter(
        'G2Zeppelin_1_lambda_par',
        'C1Stick_1_lambda_par')

    bingham_bundle.set_fixed_parameter(
        'G2Zeppelin_1_lambda_par', 1.7e-9)

    bingham_bundle.set_fixed_parameter(
        'SD2Bingham_1_mu', [0., 0.])

    mc_bingham = (
        modeling_framework.MultiCompartmentModel(
            models=[bingham_bundle])
    )

    beta0 = camino_dispersed.beta > 0
    diff17 = camino_dispersed.diffusivities == 1.7e-9
    mask = np.all([beta0, diff17], axis=0)
    E_watson = camino_dispersed.signal_attenuation[mask]
    fractions_watson = camino_dispersed.fractions[mask]

    fitted_params = (mc_bingham.fit(scheme,
                                    E_watson[::200]).fitted_parameters
                     )

    mean_abs_error = np.mean(
        abs(fitted_params['SD2BinghamDistributed_1_partial_volume_0'].squeeze(
        ) - fractions_watson[::200]))
    assert_equal(mean_abs_error < 0.035, True)
def test_parametric_fod_spherical_mean_model():
    stick = cylinder_models.C1Stick()
    watsonstick = distribute_models.SD1WatsonDistributed([stick])
    params = {}
    for parameter, card in watsonstick.parameter_cardinality.items():
        params[parameter] = (np.random.rand(card) *
                             watsonstick.parameter_scales[parameter])
    data = np.atleast_2d(watsonstick(scheme, **params))

    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    smt = modeling_framework.MultiCompartmentSphericalMeanModel(
        [stick, zeppelin])
    smt.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                               'C1Stick_1_lambda_par', 'partial_volume_0',
                               'partial_volume_1')
    smt.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')

    smt_fit = smt.fit(scheme, data)

    assert_raises(ValueError,
                  smt_fit.return_parametric_fod_model,
                  Ncompartments=1.5)

    assert_raises(ValueError,
                  smt_fit.return_parametric_fod_model,
                  Ncompartments=0)

    assert_raises(ValueError,
                  smt_fit.return_parametric_fod_model,
                  distribution='bla')

    for distribution_name in ['watson', 'bingham']:
        fod_model = smt_fit.return_parametric_fod_model(
            distribution=distribution_name, Ncompartments=1)
        fitted_fod_model = fod_model.fit(scheme, data)
        assert_(isinstance(fitted_fod_model.fitted_parameters, dict))
Example #12
def test_spherical_mean_stick_tortuous_zeppelin():
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    mc_mdi = modeling_framework.MultiCompartmentSphericalMeanModel(
        models=[stick, zeppelin])

    mc_mdi.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                  'C1Stick_1_lambda_par',
                                  'partial_volume_0',
                                  'partial_volume_1')
    mc_mdi.set_equal_parameter('G2Zeppelin_1_lambda_par',
                               'C1Stick_1_lambda_par')

    fitted_params_par = (
        mc_mdi.fit(
            scheme,
            camino_parallel.signal_attenuation[::20]
        ).fitted_parameters
    )
    fitted_params_disp = (
        mc_mdi.fit(
            scheme,
            camino_dispersed.signal_attenuation[::40]
        ).fitted_parameters
    )

    mean_abs_error_par = np.mean(
        abs(fitted_params_par['partial_volume_0'].squeeze(
        ) - camino_parallel.fractions[::20]))

    mean_abs_error_disp = np.mean(
        abs(fitted_params_disp['partial_volume_0'].squeeze(
        ) - camino_dispersed.fractions[::40]))
    assert_equal(mean_abs_error_par < 0.02, True)
    assert_equal(mean_abs_error_disp < 0.02, True)
Example #13
def test_orienting_zeppelin():
    # test for orienting the axis of the Zeppelin along mu
    # first test to see if Ezeppelin equals Gaussian with lambda_par along mu
    random_mu = np.random.rand(2) * np.pi
    n = np.array([utils.sphere2cart(np.r_[1, random_mu])])
    random_bval = np.r_[np.random.rand() * 1e9]
    scheme = acquisition_scheme_from_bvalues(random_bval, n, delta, Delta)
    random_lambda_par = np.random.rand() * 3 * 1e-9
    random_lambda_perp = random_lambda_par / 2.

    zeppelin = gaussian_models.G2Zeppelin(
        mu=random_mu, lambda_par=random_lambda_par,
        lambda_perp=random_lambda_perp)
    E_zep_par = zeppelin(scheme)
    E_check_par = np.exp(-random_bval * random_lambda_par)
    assert_almost_equal(E_zep_par, E_check_par)

    # second test to see if Ezeppelin equals Gaussian with lambda_perp
    # perpendicular to mu
    n_perp = np.array([perpendicular_vector(n[0])])
    scheme = acquisition_scheme_from_bvalues(random_bval, n_perp, delta, Delta)
    E_zep_perp = zeppelin(scheme)
    E_check_perp = np.exp(-random_bval * random_lambda_perp)
    assert_almost_equal(E_zep_perp, E_check_perp)
all_bvecs = np.hstack((b1k_bvecs, b2k_bvecs))
all_bvals = np.hstack((b1k_bvals, b2k_bvals))
all_data = np.concatenate((b1k_fdata, b2k_fdata), axis=3)

# Prepare Dmipy Acquisition Scheme
# Dmipy expects b-values in SI units (s/m^2), so values given in s/mm^2 must
# be multiplied by 1e6.

all_bvals = all_bvals * 1e6
all_bvecs = np.transpose(all_bvecs)

# acquisition_scheme_from_bvalues can also take the pulse timings (small delta
# and big Delta).
# TODO: big Delta and small delta are often not available for the data
# (see the timing sketch after this block).
acq_scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)

# We are ready to fit models
# Prepare SMT Model
zeppelin = gaussian_models.G2Zeppelin()
smt_mod = modeling_framework.MultiCompartmentSphericalMeanModel(
    models=[zeppelin])
#smt_mod.set_fractional_parameter()

# Fit SMT
smt_fit_hcp = smt_mod.fit(acq_scheme,
                          all_data,
                          Ns=30,
                          mask=all_data[..., 0] > 0,
                          use_parallel_processing=False)

# TODO Use the model name together with the fitted-parameter dictionary to build per-subject, per-model output file names.
print('Debug here')
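When the pulse timings are known, acquisition_scheme_from_bvalues also accepts them explicitly. A minimal sketch follows; the delta and Delta values are assumed placeholders, not the true timings of this data:
# Sketch only: delta/Delta below are assumed placeholder timings (in seconds).
small_delta = 0.0106
big_delta = 0.0431
acq_scheme_timed = acquisition_scheme_from_bvalues(
    all_bvals, all_bvecs, delta=small_delta, Delta=big_delta)
acq_scheme_timed.print_acquisition_info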
def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time - data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### NODDI Watson ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                  subj_data,
                                  mask=subj_data[..., 0] > 0,
                                  use_parallel_processing=True,
                                  number_of_processors=32)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = NODDI_fit_hcp.fitted_parameters

    para_Names_list = []
    for key, value in fitted_parameters.items():
        para_Names_list.append(key)

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from the already-loaded Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter], affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
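The script above only defines main(); a conventional entry-point guard (an assumption about how it is launched, not shown in the original) would be:
if __name__ == '__main__':
    main()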
Example #16
def __init__(self):
    self.stick = cylinder_models.C1Stick()
    self.ball = gaussian_models.G1Ball()
    self.zeppelin = gaussian_models.G2Zeppelin()
Example #17
def test_raise_spherical_distribution_in_spherical_mean():
    zeppelin = gaussian_models.G2Zeppelin()
    watson = distribute_models.SD1WatsonDistributed([zeppelin])
    assert_raises(ValueError,
                  modeling_framework.MultiCompartmentSphericalMeanModel,
                  [watson])
Example #18
def test_multi_voxel_parametric_to_sm_to_sh_fod_watson():
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    watsonstick = distribute_models.SD1WatsonDistributed([stick, zeppelin])

    watsonstick.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                    'C1Stick_1_lambda_par')
    watsonstick.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                       'G2Zeppelin_1_lambda_par',
                                       'partial_volume_0')
    mc_mod = modeling_framework.MultiCompartmentModel([watsonstick])

    parameter_dict = {
        'SD1WatsonDistributed_1_SD1Watson_1_mu':
        np.random.rand(10, 2),
        'SD1WatsonDistributed_1_partial_volume_0':
        np.linspace(0.1, 0.9, 10),
        'SD1WatsonDistributed_1_G2Zeppelin_1_lambda_par':
        np.linspace(1.5, 2.5, 10) * 1e-9,
        'SD1WatsonDistributed_1_SD1Watson_1_odi':
        np.linspace(0.3, 0.7, 10)
    }

    data = mc_mod.simulate_signal(scheme, parameter_dict)

    sm_mod = modeling_framework.MultiCompartmentSphericalMeanModel(
        [stick, zeppelin])
    sm_mod.set_equal_parameter('G2Zeppelin_1_lambda_par',
                               'C1Stick_1_lambda_par')
    sm_mod.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                  'G2Zeppelin_1_lambda_par',
                                  'partial_volume_0', 'partial_volume_1')

    sf_watson = []
    for mu, odi in zip(
            parameter_dict['SD1WatsonDistributed_1_SD1Watson_1_mu'],
            parameter_dict['SD1WatsonDistributed_1_SD1Watson_1_odi']):
        watson = distributions.SD1Watson(mu=mu, odi=odi)
        sf_watson.append(watson(sphere.vertices))
    sf_watson = np.array(sf_watson)

    sm_fit = sm_mod.fit(scheme, data)
    sh_mod = sm_fit.return_spherical_harmonics_fod_model()

    sh_fit_auto = sh_mod.fit(scheme, data)  # will pick tournier
    fod_tournier = sh_fit_auto.fod(sphere.vertices)
    assert_array_almost_equal(fod_tournier, sf_watson, 1)

    sh_fit_tournier = sh_mod.fit(scheme,
                                 data,
                                 solver='csd_tournier07',
                                 unity_constraint=False)
    fod_tournier = sh_fit_tournier.fod(sphere.vertices)
    assert_array_almost_equal(fod_tournier, sf_watson, 1)

    sh_fit_cvxpy = sh_mod.fit(scheme,
                              data,
                              solver='csd_cvxpy',
                              unity_constraint=True,
                              lambda_lb=0.)
    fod_cvxpy = sh_fit_cvxpy.fod(sphere.vertices)
    assert_array_almost_equal(fod_cvxpy, sf_watson, 2)

    sh_fit_cvxpy = sh_mod.fit(scheme,
                              data,
                              solver='csd_cvxpy',
                              unity_constraint=False,
                              lambda_lb=0.)
    fod_cvxpy = sh_fit_cvxpy.fod(sphere.vertices)
    assert_array_almost_equal(fod_cvxpy, sf_watson, 2)
Example #19
def fit_noddi_dmipy(input_dwi,
                    input_bval,
                    input_bvec,
                    input_mask,
                    output_dir,
                    nthreads=1,
                    solver='brute2fine',
                    model_type='WATSON',
                    parallel_diffusivity=1.7e-9,
                    iso_diffusivity=3e-9,
                    bids_fmt=False,
                    bids_id=''):

    import nibabel as nib
    from dmipy.signal_models import cylinder_models, gaussian_models
    from dmipy.distributions.distribute_models import SD1WatsonDistributed, SD2BinghamDistributed
    from dmipy.core.modeling_framework import MultiCompartmentModel
    from dmipy.core import modeling_framework
    from dmipy.core.acquisition_scheme import acquisition_scheme_from_bvalues
    from dipy.io import read_bvals_bvecs

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    #Setup the acquisition scheme
    bvals, bvecs = read_bvals_bvecs(input_bval, input_bvec)
    bvals_SI = bvals * 1e6
    acq_scheme = acquisition_scheme_from_bvalues(bvals_SI, bvecs)
    acq_scheme.print_acquisition_info

    #Load the data
    img = nib.load(input_dwi)
    data = img.get_fdata()

    #Load the mask
    img = nib.load(input_mask)
    mask_data = img.get_fdata()

    ball = gaussian_models.G1Ball()  #CSF
    stick = cylinder_models.C1Stick()  #Intra-axonal diffusion
    zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])
    else:
        dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                            'C1Stick_1_lambda_par',
                                            'partial_volume_0')
    dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                         'C1Stick_1_lambda_par')
    dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                         parallel_diffusivity)

    NODDI_mod = MultiCompartmentModel(models=[ball, dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', iso_diffusivity)
    NODDI_fit = NODDI_mod.fit(acq_scheme,
                              data,
                              mask=mask_data,
                              number_of_processors=nthreads,
                              solver=solver)

    fitted_parameters = NODDI_fit.fitted_parameters

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD2BinghamDistributed_1_SD2Bingham_1_odi']

    else:
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']

    if bids_fmt:
        output_odi = output_dir + '/' + bids_id + '_model-NODDI_parameter-ODI.nii.gz'
        output_vf_intra = output_dir + '/' + bids_id + '_model-NODDI_parameter-ICVF.nii.gz'
        output_vf_extra = output_dir + '/' + bids_id + '_model-NODDI_parameter-EXVF.nii.gz'
        output_vf_iso = output_dir + '/' + bids_id + '_model-NODDI_parameter-ISO.nii.gz'
    else:
        output_odi = output_dir + '/noddi_ODI.nii.gz'
        output_vf_intra = output_dir + '/noddi_ICVF.nii.gz'
        output_vf_extra = output_dir + '/noddi_EXVF.nii.gz'
        output_vf_iso = output_dir + '/noddi_ISO.nii.gz'

    #Save the images
    odi_img = nib.Nifti1Image(odi, img.affine, img.header)
    odi_img.set_sform(img.get_sform())
    odi_img.set_qform(img.get_qform())
    nib.save(odi_img, output_odi)

    icvf_img = nib.Nifti1Image(vf_intra, img.affine, img.header)
    icvf_img.set_sform(img.get_sform())
    icvf_img.set_qform(img.get_qform())
    nib.save(icvf_img, output_vf_intra)

    ecvf_img = nib.Nifti1Image(vf_extra, img.affine, img.header)
    ecvf_img.set_sform(img.get_sform())
    ecvf_img.set_qform(img.get_qform())
    nib.save(ecvf_img, output_vf_extra)

    iso_img = nib.Nifti1Image(vf_iso, img.affine, img.header)
    iso_img.set_sform(img.get_sform())
    iso_img.set_qform(img.get_qform())
    nib.save(iso_img, output_vf_iso)
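A hedged usage sketch for fit_noddi_dmipy; every file path below is a hypothetical placeholder, not taken from the original:
# Sketch only: all paths are hypothetical placeholders.
fit_noddi_dmipy(input_dwi='sub-01_dwi.nii.gz',
                input_bval='sub-01_dwi.bval',
                input_bvec='sub-01_dwi.bvec',
                input_mask='sub-01_brainmask.nii.gz',
                output_dir='noddi_out',
                nthreads=4,
                solver='brute2fine',
                model_type='WATSON')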
Example #20
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    #subj_ID_List = ['125525', '118225', '116726']
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### NODDI Watson ####
        ball = gaussian_models.G1Ball()
        stick = cylinder_models.C1Stick()
        zeppelin = gaussian_models.G2Zeppelin()

        watson_dispersed_bundle = SD1WatsonDistributed(
            models=[stick, zeppelin])

        watson_dispersed_bundle.set_tortuous_parameter(
            'G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
            'partial_volume_0')
        watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                    'C1Stick_1_lambda_par')
        watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                    1.7e-9)

        NODDI_mod = MultiCompartmentModel(
            models=[ball, watson_dispersed_bundle])
        NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

        print('Fitting the NODDI Model ...')
        fit_start_time = time.time()
        NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                      subj_data,
                                      mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = NODDI_fit_hcp.fitted_parameters

        para_Names_list = []
        for key, value in fitted_parameters.items():
            para_Names_list.append(key)

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from the already-loaded Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
bvals, bvecs = read_bvals_bvecs(input_bval, input_bvec)
bvals_SI = bvals * 1e6
acq_scheme = acquisition_scheme_from_bvalues(bvals_SI, bvecs)
acq_scheme.print_acquisition_info

#Load the data
img = nib.load(input_dwi)
data = img.get_fdata()

#Load the mask
img = nib.load(input_mask)
mask_data = img.get_fdata()

ball = gaussian_models.G1Ball()  #CSF
stick = cylinder_models.C1Stick()  #Intra-axonal diffusion
zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

if model_type == 'Bingham' or model_type == 'BINGHAM':
    dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])
else:
    dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                        'C1Stick_1_lambda_par',
                                        'partial_volume_0')
dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                     'C1Stick_1_lambda_par')
dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                     parallel_diffusivity)

NODDI_mod = MultiCompartmentModel(models=[ball, dispersed_bundle])
def main():

    # Base path of all files provided for the 'All Models Are Wrong' challenge
    base_path = r'/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/'
    base_path = os.path.normpath(base_path)

    # Just dealing with PGSE for now
    pgse_acq_params_path = os.path.join(base_path, 'PGSE_AcqParams.txt')
    pgse_signal_path = os.path.join(base_path, 'PGSE_Simulations.txt')

    # Read files via Numpy
    pgse_acq_params = np.loadtxt(pgse_acq_params_path)
    pgse_signal_data = np.loadtxt(pgse_signal_path)
    pgse_example_sub_diff = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/DIffusivity-ExampleSubmission3/PGSE.txt'
    )
    pgse_example_sub_volfrac = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/VolumeFraction-ExampleSubmission3/PGSE.txt'
    )

    # Transpose the Signal data
    pgse_signal_data = pgse_signal_data.transpose()

    # Dissect the acquisition parameters to form the Acquisition Table
    bvecs = pgse_acq_params[:, 1:4]
    bvals = pgse_acq_params[:, 6] * 1e6
    grad_str = pgse_acq_params[:, 0]
    small_del = pgse_acq_params[:, 4]
    big_del = pgse_acq_params[:, 5]

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(bvals,
                                                      bvecs,
                                                      delta=small_del,
                                                      Delta=big_del)

    subj_Acq_Scheme.print_acquisition_info

    #### NODDI Bingham ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    bingham_dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])

    bingham_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                    'C1Stick_1_lambda_par',
                                                    'partial_volume_0')
    bingham_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                 'C1Stick_1_lambda_par')
    bingham_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                 1.7e-9)

    NODDI_bingham_mod = MultiCompartmentModel(
        models=[ball, bingham_dispersed_bundle])
    NODDI_bingham_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_bingham_mod.fit(subj_Acq_Scheme,
                                          pgse_signal_data,
                                          use_parallel_processing=True,
                                          number_of_processors=8)

    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(
        fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    sub_1_pv0 = NODDI_fit_hcp.fitted_parameters['partial_volume_0']
    sub_2_pv1 = NODDI_fit_hcp.fitted_parameters['partial_volume_1']

    np.savetxt('noddi_bingham_pv0.txt', sub_1_pv0)
    np.savetxt('noddi_bingham_pv1.txt', sub_2_pv1)

    print('Debug here')

    return None
Example #23
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'MC_SMT'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[:, :, 30:32, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### MC-SMT Begin ####
        stick = cylinder_models.C1Stick()
        zeppelin = gaussian_models.G2Zeppelin()

        bundle = BundleModel([stick, zeppelin])

        # Model Parameter Constraints
        bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                      'C1Stick_1_lambda_par',
                                      'partial_volume_0')
        bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                   'C1Stick_1_lambda_par')

        mcdmi_mod = modeling_framework.MultiCompartmentSphericalMeanModel(
            models=[bundle])

        # Get List of Estimated Parameter Names
        para_Names_list = mcdmi_mod.parameter_names

        print('Fitting the MC-SMT Model ...')
        fit_start_time = time.time()
        mcdmi_fit = mcdmi_mod.fit(subj_Acq_Scheme,
                                  subj_data,
                                  mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = mcdmi_fit.fitted_parameters

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from the already-loaded Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None