Example #1
def test_orienting_stick():
    # test for orienting the axis of the Stick along mu
    # first test to see if Estick equals Gaussian with lambda_par along mu
    random_n_mu_vector = np.random.rand(2) * np.pi
    n = utils.sphere2cart(np.r_[1, random_n_mu_vector])
    random_bval = np.r_[np.random.rand() * 1e9]
    random_lambda_par = np.random.rand() * 3e-9

    scheme = acquisition_scheme_from_bvalues(
        random_bval, np.atleast_2d(n), delta, Delta)
    # initialize model
    stick = cylinder_models.C1Stick(mu=random_n_mu_vector,
                                    lambda_par=random_lambda_par)

    # test that the parallel direction attenuates as a Gaussian
    E_stick = stick(scheme)
    E_check = np.exp(-random_bval * (random_lambda_par))
    assert_almost_equal(E_stick, E_check)

    # test that the perpendicular direction does not attenuate
    n_perp = perpendicular_vector(n)
    scheme = acquisition_scheme_from_bvalues(
        random_bval, np.atleast_2d(n_perp), delta, Delta)
    E_stick_perp = stick(scheme)
    assert_almost_equal(E_stick_perp, 1.)
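
The test above leans on perpendicular_vector from dmipy's utils module. As a rough standalone sketch (a hypothetical reimplementation, not dmipy's actual code), such a helper can be built from a cross product:

import numpy as np

def perpendicular_vector_sketch(v):
    # Cross v with the coordinate axis it is least aligned with,
    # so the cross product cannot vanish (assumes v is nonzero).
    axis = np.zeros(3)
    axis[np.argmin(np.abs(v))] = 1.
    perp = np.cross(v, axis)
    return perp / np.linalg.norm(perp)
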
def test_acq_scheme_without_deltas_model_catch():
    scheme = wu_minn_hcp_acquisition_scheme()
    test_data = np.random.rand(len(scheme.bvalues))
    scheme_clinical = acquisition_scheme_from_bvalues(
        scheme.bvalues, scheme.gradient_directions)
    mc_model = MultiCompartmentModel([C4CylinderGaussianPhaseApproximation()])
    assert_raises(ValueError, mc_model.fit, scheme_clinical, test_data)
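
Restricted models such as the cylinder approximations need the pulse timings, which is why the fit above raises a ValueError. A minimal sketch of the fix, with illustrative timing values in seconds:

scheme_with_deltas = acquisition_scheme_from_bvalues(
    scheme.bvalues, scheme.gradient_directions,
    delta=0.01, Delta=0.03)  # pulse duration and separation [s]
mc_model.fit(scheme_with_deltas, test_data)  # no longer raises
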
def test_fitting_without_b0_raises():
    bvals = np.atleast_1d(1e9)
    bvecs = np.atleast_2d([1., 0., 0.])
    scheme = acquisition_scheme_from_bvalues(bvals, bvecs)
    mc = modeling_framework.MultiCompartmentModel([gaussian_models.G1Ball()])
    data = np.atleast_1d(1.)
    assert_raises(ValueError, mc.fit, scheme, data)
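
dmipy refuses to fit a scheme without a b=0 measurement, presumably because fitting normalizes the data by the b0 signal. A minimal sketch that includes one (a zero gradient direction is conventional for b0):

bvals_with_b0 = np.r_[0., 1e9]
bvecs_with_b0 = np.array([[0., 0., 0.], [1., 0., 0.]])
scheme_with_b0 = acquisition_scheme_from_bvalues(bvals_with_b0, bvecs_with_b0)
mc.fit(scheme_with_b0, np.r_[1., 0.8])  # fits instead of raising
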
def test_acquisition_scheme_summary(Nsamples=10):
    bvals = np.tile(1e9, Nsamples)
    bvecs = np.tile(np.r_[1., 0., 0.], (Nsamples, 1))
    big_delta = 0.03
    small_delta = 0.01
    gtab_mipy = acquisition_scheme_from_bvalues(bvalues=bvals,
                                                gradient_directions=bvecs,
                                                delta=small_delta,
                                                Delta=big_delta)
    gtab_mipy.print_acquisition_info
def test_equivalent_scheme_bvals_and_bvecs(Nsamples=10):
    bvalues = np.tile(1, Nsamples)
    bvecs = np.tile(np.r_[1., 0., 0.], (Nsamples, 1))
    delta = np.ones(Nsamples)
    Delta = np.ones(Nsamples)
    scheme_from_bvals = acquisition_scheme_from_bvalues(
        bvalues, bvecs, delta, Delta)
    qvalues = scheme_from_bvals.qvalues
    scheme_from_qvals = acquisition_scheme_from_qvalues(
        qvalues, bvecs, delta, Delta)
    bvalues_from_qvalues = scheme_from_qvals.bvalues
    assert_array_equal(bvalues, bvalues_from_qvalues)
def test_equivalent_scheme_bvals_and_gradient_strength(Nsamples=10):
    bvalues = np.tile(1, Nsamples)
    bvecs = np.tile(np.r_[1., 0., 0.], (Nsamples, 1))
    delta = np.ones(Nsamples)
    Delta = np.ones(Nsamples)
    scheme_from_bvals = acquisition_scheme_from_bvalues(
        bvalues, bvecs, delta, Delta)
    gradient_strengths = scheme_from_bvals.gradient_strengths
    scheme_from_gradient_strengths = (
        acquisition_scheme_from_gradient_strengths(gradient_strengths, bvecs,
                                                   delta, Delta))
    bvalues_from_gradient_strengths = (scheme_from_gradient_strengths.bvalues)
    assert_array_equal(bvalues, bvalues_from_gradient_strengths)
def test_shell_indices_with_varying_diffusion_times(Nsamples=10):
    # tests whether measurements with the same bvalue but different diffusion
    # time are correctly classified in different shells
    bvalues = np.tile(1e9, Nsamples)
    delta = 0.01
    Delta = np.hstack(
        [np.tile(0.01,
                 len(bvalues) // 2),
         np.tile(0.03,
                 len(bvalues) // 2)])
    gradient_directions = np.tile(np.r_[1., 0., 0.], (Nsamples, 1))
    scheme = acquisition_scheme_from_bvalues(bvalues, gradient_directions,
                                             delta, Delta)
    assert_equal(len(np.unique(scheme.shell_indices)), 2)
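
A quick way to inspect the resulting grouping is to walk over the unique shell indices and print each shell's b-value, diffusion time and size (attribute names as used above; Delta assumed to be exposed per measurement on the scheme):

for shell in np.unique(scheme.shell_indices):
    in_shell = scheme.shell_indices == shell
    print('shell', shell,
          'b =', scheme.bvalues[in_shell][0],
          'Delta =', scheme.Delta[in_shell][0],
          'N =', int(np.sum(in_shell)))
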
def test_dmipy2dipy_acquisition_converter(Nsamples=10):
    bvals = np.tile(1e9, Nsamples)
    bvecs = np.tile(np.r_[1., 0., 0.], (Nsamples, 1))
    big_delta = 0.03
    small_delta = 0.01
    gtab_mipy = acquisition_scheme_from_bvalues(bvalues=bvals,
                                                gradient_directions=bvecs,
                                                delta=small_delta,
                                                Delta=big_delta)
    gtab_dipy = gtab_dmipy2dipy(gtab_mipy)
    assert_array_equal(gtab_mipy.bvalues / 1e6, gtab_dipy.bvals)
    assert_array_equal(gtab_mipy.gradient_directions, gtab_dipy.bvecs)
    assert_equal(gtab_mipy.Delta, gtab_dipy.big_delta)
    assert_equal(gtab_mipy.delta, gtab_dipy.small_delta)
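
The division by 1e6 reflects the two libraries' unit conventions: dmipy stores b-values in SI units (s/m^2) while dipy gradient tables use s/mm^2. dmipy also ships the reverse converter, gtab_dipy2dmipy; a hedged usage sketch (import path assumed to mirror the converter used above):

from dipy.core.gradients import gradient_table
from dmipy.core.acquisition_scheme import gtab_dipy2dmipy

bvals_dipy = np.r_[0., np.tile(1e3, 9)]            # s/mm^2, dipy convention
bvecs_dipy = np.tile(np.r_[1., 0., 0.], (10, 1))
bvecs_dipy[0] = 0.                                 # zero direction for the b0
gtab_dipy = gradient_table(bvals_dipy, bvecs_dipy,
                           big_delta=0.03, small_delta=0.01)
gtab_back = gtab_dipy2dmipy(gtab_dipy)             # b-values back in s/m^2
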
Example #9
def test_orienting_zeppelin():
    # test for orienting the axis of the Zeppelin along mu
    # first test to see if Ezeppelin equals Gaussian with lambda_par along mu
    random_mu = np.random.rand(2) * np.pi
    n = np.array([utils.sphere2cart(np.r_[1, random_mu])])
    random_bval = np.r_[np.random.rand() * 1e9]
    scheme = acquisition_scheme_from_bvalues(random_bval, n, delta, Delta)
    random_lambda_par = np.random.rand() * 3 * 1e-9
    random_lambda_perp = random_lambda_par / 2.

    zeppelin = gaussian_models.G2Zeppelin(
        mu=random_mu, lambda_par=random_lambda_par,
        lambda_perp=random_lambda_perp)
    E_zep_par = zeppelin(scheme)
    E_check_par = np.exp(-random_bval * random_lambda_par)
    assert_almost_equal(E_zep_par, E_check_par)

    # second test to see if Ezeppelin equals Gaussian with lambda_perp
    # perpendicular to mu
    n_perp = np.array([perpendicular_vector(n[0])])
    scheme = acquisition_scheme_from_bvalues(random_bval, n_perp, delta, Delta)
    E_zep_perp = zeppelin(scheme)
    E_check_perp = np.exp(-random_bval * random_lambda_perp)
    assert_almost_equal(E_zep_perp, E_check_perp)
def test_gamma_distributed_models_spherical_mean_numerical(
        bvalue=1e9, delta=1e-2, Delta=2e-2):
    bvals = np.tile(bvalue, len(sphere.vertices))
    scheme = acquisition_scheme_from_bvalues(bvals, sphere.vertices, delta,
                                             Delta)
    for model in distributable_models:
        dist_mod = DD1GammaDistributed([model])
        params = {}
        for param, card in dist_mod.parameter_cardinality.items():
            params[param] = (np.random.rand(card) *
                             dist_mod.parameter_scales[param])

        signal_shell = dist_mod(scheme, **params)
        signal_shell_smt = np.mean(signal_shell)
        signal_smt = dist_mod.spherical_mean(scheme, **params)
        assert_almost_equal(signal_shell_smt, signal_smt, 2)
def test_model_spherical_mean_analytic_vs_numerical(bvalue=1e9,
                                                    delta=1e-2,
                                                    Delta=2e-2):
    bvals = np.tile(bvalue, len(sphere.vertices))
    scheme = acquisition_scheme_from_bvalues(bvals, sphere.vertices, delta,
                                             Delta)
    for model in models:
        params = {}
        for param, card in model.parameter_cardinality.items():
            params[param] = (np.random.rand(card) *
                             model.parameter_scales[param])

        signal_shell = model(scheme, **params)
        signal_shell_smt = np.mean(signal_shell)
        signal_smt = model.spherical_mean(scheme, **params)
        assert_almost_equal(signal_shell_smt, signal_smt, 2)
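
Both tests rely on the same identity: averaging E(b, n) over directions spread evenly on a shell approximates the model's spherical mean. A compact standalone sketch of that estimator (sphere vertices via dipy's get_sphere, as used elsewhere in these examples):

from dipy.data import get_sphere

def numerical_spherical_mean(model, bvalue, delta, Delta, **params):
    # Evaluate the model on a dense shell and average over directions.
    vertices = get_sphere('repulsion724').vertices
    shell_bvals = np.tile(bvalue, len(vertices))
    shell_scheme = acquisition_scheme_from_bvalues(
        shell_bvals, vertices, delta, Delta)
    return np.mean(model(shell_scheme, **params))
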
def test_spherical_convolution_watson_sh(sh_order=4):
    sphere = get_sphere('symmetric724')

    n = sphere.vertices
    bval = np.tile(1e9, len(n))
    scheme = acquisition_scheme_from_bvalues(bval, n, delta, Delta)
    indices_sphere_orientations = np.arange(sphere.vertices.shape[0])
    np.random.shuffle(indices_sphere_orientations)
    mu_index = indices_sphere_orientations[0]
    mu_watson = sphere.vertices[mu_index]
    mu_watson_sphere = utils.cart2sphere(mu_watson)[1:]

    watson = distributions.SD1Watson(mu=mu_watson_sphere, odi=.3)
    f_sf = watson(n=sphere.vertices)
    f_sh = sf_to_sh(f_sf, sphere, sh_order)

    lambda_par = 2e-9
    stick = cylinder_models.C1Stick(mu=[0, 0], lambda_par=lambda_par)
    k_sf = stick(scheme)
    sh_matrix, m, n = real_sym_sh_mrtrix(sh_order, sphere.theta, sphere.phi)
    sh_matrix_inv = np.linalg.pinv(sh_matrix)
    k_sh = np.dot(sh_matrix_inv, k_sf)
    k_rh = k_sh[m == 0]

    fk_convolved_sh = sh_convolution(f_sh, k_rh)
    fk_convolved_sf = sh_to_sf(fk_convolved_sh, sphere, sh_order)

    # assert that the spherical mean is the same for the kernel and the
    # convolved kernel
    assert_almost_equal(abs(np.mean(k_sf) - np.mean(fk_convolved_sf)), 0., 2)
    # assert that the lowest signal attenuation E(b, n) occurs along the
    # orientation of the Watson distribution.
    min_position = np.argmin(fk_convolved_sf)

    if min_position == mu_index:
        assert_equal(min_position, mu_index)
    else:  # the minimum may lie at the antipodal vertex instead
        sphere_positions = np.arange(sphere.vertices.shape[0])
        opposite_index = np.all(
            np.round(sphere.vertices + mu_watson, 2) == 0, axis=1
        )
        min_position_opposite = sphere_positions[opposite_index]
        assert_equal(min_position_opposite, min_position)
Example #13
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'MC_SMT'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        # print_acquisition_info is a property that prints the summary itself
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[:, :, 30:32, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### MC-SMT Begin ####
        stick = cylinder_models.C1Stick()
        zeppelin = gaussian_models.G2Zeppelin()

        bundle = BundleModel([stick, zeppelin])

        # Model Paramter Constraints
        bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                      'C1Stick_1_lambda_par',
                                      'partial_volume_0')
        bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                   'C1Stick_1_lambda_par')

        mcdmi_mod = modeling_framework.MultiCompartmentSphericalMeanModel(
            models=[bundle])

        # Get List of Estimated Parameter Names
        para_Names_list = mcdmi_mod.parameter_names

        print('Fitting the MC-SMT Model ...')
        fit_start_time = time.time()
        mcdmi_fit = mcdmi_mod.fit(subj_Acq_Scheme,
                                  subj_data,
                                  mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = mcdmi_fit.fitted_parameters

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
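
The two constraints above define the MC-SMT bundle: the zeppelin's parallel diffusivity is set equal to the stick's, and its perpendicular diffusivity follows the tortuosity relation lambda_perp = (1 - f_stick) * lambda_par. A quick standalone check of that relation (plain Python, values illustrative):

lambda_par = 1.7e-9   # shared parallel diffusivity [m^2/s]
f_stick = 0.6         # intra-axonal volume fraction (partial_volume_0)
lambda_perp = (1. - f_stick) * lambda_par
print(lambda_perp)    # 6.8e-10 m^2/s, the tortuosity-constrained value
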
Example #14
from dmipy.signal_models import gaussian_models, sphere_models
from numpy.testing import assert_array_equal, assert_equal
import numpy as np
from dmipy.core.acquisition_scheme import (
    acquisition_scheme_from_bvalues)

bvals = np.random.rand(10) * 1e9
bvecs = np.random.rand(10, 3)
bvecs /= np.linalg.norm(bvecs, axis=1)[:, None]
delta = 0.01
Delta = 0.03
scheme = acquisition_scheme_from_bvalues(bvals, bvecs, delta, Delta)


def test_dot():
    dot = sphere_models.S1Dot()
    E_dot = dot(scheme)
    assert_equal(np.all(E_dot == 1.), True)


def test_ball(lambda_iso=1.7e-9):
    ball = gaussian_models.G1Ball(lambda_iso=lambda_iso)
    E_ball = ball(scheme)
    E = np.exp(-bvals * lambda_iso)
    assert_array_equal(E, E_ball)
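
Building on the single-compartment tests above, a hedged sketch of composing the two models and simulating a mixed signal (parameter names follow dmipy's <model>_<index>_<parameter> convention; the simulate_signal call is assumed to accept the parameter vector built here):

from dmipy.core.modeling_framework import MultiCompartmentModel
from dmipy.signal_models import cylinder_models

ball_stick = MultiCompartmentModel(
    models=[gaussian_models.G1Ball(), cylinder_models.C1Stick()])
params = ball_stick.parameters_to_parameter_vector(
    G1Ball_1_lambda_iso=3e-9,
    C1Stick_1_mu=[0.5, 0.5],      # orientation angles (theta, phi)
    C1Stick_1_lambda_par=1.7e-9,
    partial_volume_0=0.3,         # ball signal fraction
    partial_volume_1=0.7)         # stick signal fraction
E_mix = ball_stick.simulate_signal(scheme, params)
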
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'IVIM'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[:, :, 25:27, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### IVIM ####
        print('Fitting IVIM ...')
        fit_start_time = time.time()
        ivim_fit_dmipy_fixed = ivim_Dstar_fixed(subj_Acq_Scheme,
                                                subj_data,
                                                mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('IVIM Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        # Get List of Estimated Parameter Names
        # para_Names_list = ivim_fit_dmipy_fixed.parameter_names

        fitted_parameters = ivim_fit_dmipy_fixed.fitted_parameters

        para_Names_list = []
        for key, value in fitted_parameters.items():
            para_Names_list.append(key)

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'MT_CSD'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['115017', '114823', '116726', '118225', '115825', '125525']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[:, :, 30:32, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### MT-CSD Begin ####

        S0_tissue_responses, tissue_response_models, selection_map = three_tissue_response_dhollander16(
            subj_Acq_Scheme,
            subj_data,
            wm_algorithm='tournier13',
            wm_N_candidate_voxels=10,
            gm_perc=0.2,
            csf_perc=0.4)
        TR2_wm, TR1_gm, TR1_csf = tissue_response_models
        S0_wm, S0_gm, S0_csf = S0_tissue_responses

        mt_csd_mod = MultiCompartmentSphericalHarmonicsModel(
            models=tissue_response_models,
            S0_tissue_responses=S0_tissue_responses)

        fit_args = {
            'acquisition_scheme': subj_Acq_Scheme,
            'data': subj_data,
            'mask': subj_data[..., 0] > 0
        }

        print('Fitting the MT-CSD Model ...')
        fit_start_time = time.time()
        mt_csd_fits = []
        for fit_S0_response in [True, False]:
            mt_csd_fits.append(
                mt_csd_mod.fit(fit_S0_response=fit_S0_response, **fit_args))
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        # Get List of Estimated Parameter Names
        para_Names_list = mt_csd_mod.parameter_names

        # Use the fit that includes the S0 tissue responses
        fitted_parameters = mt_csd_fits[0].fitted_parameters

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'VERDICT'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['125525', '118225', '116726', '115825', '115017', '114823']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                          all_bvecs,
                                                          delta=10.6 * 1e-3,
                                                          Delta=43.1 * 1e-3,
                                                          TE=89.5 * 1e-3)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[55:60, 65:70, 60:62, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### Verdict Begin ####
        sphere = sphere_models.S4SphereGaussianPhaseApproximation(
            diffusion_constant=0.9e-9)
        ball = gaussian_models.G1Ball()
        stick = cylinder_models.C1Stick()

        verdict_mod = MultiCompartmentModel(models=[sphere, ball, stick])

        verdict_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 0.9e-9)
        verdict_mod.set_parameter_optimization_bounds('C1Stick_1_lambda_par',
                                                      [3.05e-9, 10e-9])

        print('Fitting the Verdict Model ...')
        fit_start_time = time.time()
        mcdmi_fit = verdict_mod.fit(subj_Acq_Scheme,
                                    axial_slice_data,
                                    mask=axial_slice_data[..., 0] > 0,
                                    solver='mix',
                                    use_parallel_processing=False)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = mcdmi_fit.fitted_parameters

        # Get List of Estimated Parameter Names
        para_Names_list = []
        for key, value in fitted_parameters.items():
            para_Names_list.append(key)

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
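
solver='mix' selects dmipy's stochastic MIX optimizer, which avoids brute-force grid initialization but is considerably slower. The default 'brute2fine' fit is the natural point of comparison; a sketch under that assumption:

mcdmi_fit_b2f = verdict_mod.fit(subj_Acq_Scheme,
                                axial_slice_data,
                                mask=axial_slice_data[..., 0] > 0,
                                solver='brute2fine')  # dmipy's default solver
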
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    #subj_ID_List = ['125525', '118225', '116726']
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### NODDI Watson ####
        ball = gaussian_models.G1Ball()
        stick = cylinder_models.C1Stick()
        zeppelin = gaussian_models.G2Zeppelin()

        watson_dispersed_bundle = SD1WatsonDistributed(
            models=[stick, zeppelin])

        watson_dispersed_bundle.set_tortuous_parameter(
            'G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
            'partial_volume_0')
        watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                    'C1Stick_1_lambda_par')
        watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                    1.7e-9)

        NODDI_mod = MultiCompartmentModel(
            models=[ball, watson_dispersed_bundle])
        NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

        print('Fitting the NODDI Model ...')
        fit_start_time = time.time()
        NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                      subj_data,
                                      mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = NODDI_fit_hcp.fitted_parameters

        para_Names_list = []
        for key, value in fitted_parameters.items():
            para_Names_list.append(key)

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
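
The Watson bundle's dispersion is reported as an orientation dispersion index, related to the Watson concentration kappa by odi = (2/pi) * arctan(1/kappa). A quick standalone round-trip check of that mapping (formula as used in the NODDI literature):

import numpy as np

def odi_to_kappa(odi):
    return 1. / np.tan(odi * np.pi / 2.)

def kappa_to_odi(kappa):
    return (2. / np.pi) * np.arctan(1. / kappa)

print(kappa_to_odi(odi_to_kappa(0.3)))  # round-trips to 0.3
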
solver = 'brute2fine'

args = sys.argv[1:]
input_dwi = args[0]
input_bval = args[1]
input_bvec = args[2]
input_mask = args[3]
output_dir = args[4]

if not os.path.exists(output_dir):
    os.mkdir(output_dir)

#Setup the acquisition scheme
bvals, bvecs = read_bvals_bvecs(input_bval, input_bvec)
bvals_SI = bvals * 1e6
acq_scheme = acquisition_scheme_from_bvalues(bvals_SI, bvecs)
acq_scheme.print_acquisition_info

#Load the data
img = nib.load(input_dwi)
data = img.get_fdata()

#Load the mask
img = nib.load(input_mask)
mask_data = img.get_fdata()

ball = gaussian_models.G1Ball()  #CSF
stick = cylinder_models.C1Stick()  #Intra-axonal diffusion
zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

if model_type in ('Bingham', 'BINGHAM'):
def main():

    # Define Base Data Paths here
    base_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/SingleVoxelSignals_norm/SingleVoxelSignals_norm'
    base_data_path = os.path.normpath(base_data_path)

    # Define Saving Paths here
    save_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/results_norm/intra_extra_rest'
    save_data_path = os.path.normpath(save_data_path)
    if not os.path.exists(save_data_path):
        os.mkdir(save_data_path)

    # Scheme and The Directions file Paths
    scheme_path = os.path.join(base_data_path, 'scheme.scheme')
    bvecs_path = os.path.join(base_data_path, 'BVECS.bvec')
    bvals_path = os.path.join(base_data_path, 'BVALS.bval')

    # Voxel Paths
    voxel_fc_path = os.path.join(base_data_path, 'FasciulusCuneatus.txt')
    voxel_lc_path = os.path.join(base_data_path, 'LateralCST.txt')
    voxel_sl_path = os.path.join(base_data_path, 'SpinalLemniscus.txt')
    voxel_vc_path = os.path.join(base_data_path, 'VentralCST.txt')
    voxel_vh_path = os.path.join(base_data_path, 'VentralHorn.txt')

    # Reading the Scheme and the Directions
    scheme_data = np.loadtxt(scheme_path)
    bvecs_data = np.loadtxt(bvecs_path)
    bvals_data = np.loadtxt(bvals_path)

    # Read the voxel Data
    fc_data = []
    with open(voxel_fc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            fc_data.append(row)
    fc_data = np.asarray(fc_data, dtype='float32')
    print('FC Voxel Shape: {}'.format(fc_data.shape))

    lc_data = []
    with open(voxel_lc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            lc_data.append(row)
    lc_data = np.asarray(lc_data, dtype='float32')
    print('LC Voxel Shape: {}'.format(lc_data.shape))

    sl_data = []
    with open(voxel_sl_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            sl_data.append(row)
    sl_data = np.asarray(sl_data, dtype='float32')
    print('SL Voxel Shape: {}'.format(sl_data.shape))

    vc_data = []
    with open(voxel_vc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            vc_data.append(row)
    vc_data = np.asarray(vc_data, dtype='float32')
    print('VC Voxel Shape: {}'.format(vc_data.shape))

    vh_data = []
    with open(voxel_vh_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            vh_data.append(row)
    vh_data = np.asarray(vh_data, dtype='float32')
    print('VH Voxel Shape: {}'.format(vh_data.shape))

    print('All Data Loaded ...')

    print('Constructing Acquisition Schemes')
    all_bvals = bvals_data * 1e6
    all_bvecs = np.transpose(bvecs_data)

    little_delta = scheme_data[:, 0]
    big_delta = scheme_data[:, 1]
    #t_e = scheme_data[:, 4]

    Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                 all_bvecs,
                                                 delta=little_delta * 1e-3,
                                                 Delta=big_delta * 1e-3)

    Acq_Scheme.print_acquisition_info

    cylinder_dict = {
        'C1': cyn.C1Stick,
        'C2': cyn.C2CylinderStejskalTannerApproximation,
        'C3': cyn.C3CylinderCallaghanApproximation,
        'C4': cyn.C4CylinderGaussianPhaseApproximation
    }

    gaussian_dict = {'G1': gsn.G1Ball, 'G2': gsn.G2Zeppelin}

    sphere_dict = {
        'S1': sph.S1Dot,
        'S2': sph.S2SphereStejskalTannerApproximation,
        'S4': sph.S4SphereGaussianPhaseApproximation
    }

    # FC Saving path
    fc_save_path = os.path.join(save_data_path, 'FC')
    if not os.path.exists(fc_save_path):
        os.mkdir(fc_save_path)

    lc_save_path = os.path.join(save_data_path, 'LC')
    if not os.path.exists(lc_save_path):
        os.mkdir(lc_save_path)

    sl_save_path = os.path.join(save_data_path, 'SL')
    if not os.path.exists(sl_save_path):
        os.mkdir(sl_save_path)

    vc_save_path = os.path.join(save_data_path, 'VC')
    if not os.path.exists(vc_save_path):
        os.mkdir(vc_save_path)

    vh_save_path = os.path.join(save_data_path, 'VH')
    if not os.path.exists(vh_save_path):
        os.mkdir(vh_save_path)

    #TODO Double Combinations of Intra and Extra.
    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():

            # File name
            model_file_name = cyn_key + '_' + gsn_key + '.json'
            signal_file_name = cyn_key + '_' + gsn_key + '_signal.txt'

            cylinder = cyn_val()
            gaussian = gsn_val()

            multi_compat_model = MultiCompartmentModel(
                models=[cylinder, gaussian])

            # TODO If more than one 'mu' parameter exists, the model has
            # multiple orientation parameters; identify them and constrain
            # them to be equal to each other.
            mu_list = []
            for each_para_name in multi_compat_model.parameter_names:
                # Last two characters of the parameter name
                mu_type = each_para_name[-2:]
                if mu_type == 'mu':
                    mu_list.append(each_para_name)
                    #multi_compat_model.set_fixed_parameter(each_para_name, 1.7e-9)

            if len(mu_list) == 2:
                multi_compat_model.set_equal_parameter(mu_list[0], mu_list[1])
            # End of mu conditions
            print(multi_compat_model.parameter_names)

            ######## FC #########
            fc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                fc_data,
                use_parallel_processing=False,
                solver='mix')
            fc_fitted_params = fc_model_fit.fitted_parameters
            fc_model_signal = fc_model_fit.predict()

            ## Save FC Signal
            fc_model_signal = fc_model_signal[0, :].tolist()
            signal_save_path = os.path.join(fc_save_path, signal_file_name)
            np.savetxt(signal_save_path, fc_model_signal)
            #################

            ## Error Calculations
            fc_mse = fc_model_fit.mean_squared_error(fc_data)
            ##

            new_params = {}
            new_params['mse'] = fc_mse.tolist()

            for key, value in fc_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(fc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            #####################

            ######## LC #########
            lc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                lc_data,
                use_parallel_processing=False,
                solver='mix')
            lc_fitted_params = lc_model_fit.fitted_parameters
            lc_model_signal = lc_model_fit.predict()

            ## Save LC Signal
            lc_model_signal = lc_model_signal[0, :].tolist()
            signal_save_path = os.path.join(lc_save_path, signal_file_name)
            np.savetxt(signal_save_path, lc_model_signal)
            #################

            ## Error Calculations
            lc_mse = lc_model_fit.mean_squared_error(lc_data)
            ##

            new_params = {}
            new_params['mse'] = lc_mse.tolist()

            for key, value in lc_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(lc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            #####################

            ######## SL #########
            sl_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                sl_data,
                use_parallel_processing=False,
                solver='mix')
            sl_fitted_params = sl_model_fit.fitted_parameters
            sl_model_signal = sl_model_fit.predict()

            ## Save SL Signal
            sl_model_signal = sl_model_signal[0, :].tolist()
            signal_save_path = os.path.join(sl_save_path, signal_file_name)
            np.savetxt(signal_save_path, sl_model_signal)
            #################

            ## Error Calculations
            sl_mse = sl_model_fit.mean_squared_error(sl_data)
            ##

            new_params = {}
            new_params['mse'] = sl_mse.tolist()

            for key, value in sl_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(sl_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## VC ##########
            vc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                vc_data,
                use_parallel_processing=False,
                solver='mix')
            vc_fitted_params = vc_model_fit.fitted_parameters
            vc_model_signal = vc_model_fit.predict()

            ## Save VC Signal
            vc_model_signal = vc_model_signal[0, :].tolist()
            signal_save_path = os.path.join(vc_save_path, signal_file_name)
            np.savetxt(signal_save_path, vc_model_signal)
            #################

            ## Error Calculations
            vc_mse = vc_model_fit.mean_squared_error(vc_data)
            ##

            new_params = {}
            new_params['mse'] = vc_mse.tolist()

            for key, value in vc_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(vc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## VH #########
            vh_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                vh_data,
                use_parallel_processing=False,
                solver='mix')
            vh_fitted_params = vh_model_fit.fitted_parameters
            vh_model_signal = vh_model_fit.predict()

            ## Save VH Signal
            vh_model_signal = vh_model_signal[0, :].tolist()
            signal_save_path = os.path.join(vh_save_path, signal_file_name)
            np.savetxt(signal_save_path, vh_model_signal)
            #################

            ## Error Calculations
            vh_mse = vh_model_fit.mean_squared_error(vh_data)
            ##

            new_params = {}
            new_params['mse'] = vh_mse.tolist()

            for key, value in vh_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(vh_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            print('Model Completed with Combination of {} and {}'.format(
                cyn_key, gsn_key))

    # TODO Triple Combinations of Intra, Extra and Rest
    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():
            for sph_key, sph_val in sphere_dict.items():

                cylinder = cyn_val()
                gaussian = gsn_val()
                sphere = sph_val()

                multi_compat_model = MultiCompartmentModel(
                    models=[cylinder, gaussian, sphere])
                print(multi_compat_model.parameter_names)

                # TODO If more than one 'mu' parameter exists, the model has
                # multiple orientation parameters; identify them and constrain
                # them to be equal to each other.
                mu_list = []
                for each_para_name in multi_compat_model.parameter_names:
                    # Last two characters of the parameter name
                    mu_type = each_para_name[-2:]
                    if mu_type == 'mu':
                        mu_list.append(each_para_name)
                        # multi_compat_model.set_fixed_parameter(each_para_name, 1.7e-9)

                if len(mu_list) == 2:
                    multi_compat_model.set_equal_parameter(
                        mu_list[0], mu_list[1])
                # End of mu conditions

                # This file name is common to all voxels and describes the nomenclature
                # as the selection of models that were used based on the three components
                model_file_name = cyn_key + '_' + gsn_key + '_' + sph_key + '.json'
                signal_file_name = cyn_key + '_' + gsn_key + '_' + sph_key + '_signal.txt'

                ######## FC #########
                fc_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    fc_data,
                    use_parallel_processing=False,
                    solver='mix')
                fc_fitted_params = fc_model_fit.fitted_parameters
                fc_model_signal = fc_model_fit.predict()

                ## Save FC Signal
                fc_model_signal = fc_model_signal[0, :].tolist()
                signal_save_path = os.path.join(fc_save_path, signal_file_name)
                np.savetxt(signal_save_path, fc_model_signal)
                #################

                ## Error Calculations
                fc_mse = fc_model_fit.mean_squared_error(fc_data)
                ##

                new_params = {}
                new_params['mse'] = fc_mse.tolist()

                for key, value in fc_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(fc_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                #####################

                ######## LC #########
                lc_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    lc_data,
                    use_parallel_processing=False,
                    solver='mix')
                lc_fitted_params = lc_model_fit.fitted_parameters
                lc_model_signal = lc_model_fit.predict()

                ## Save LC Signal
                lc_model_signal = lc_model_signal[0, :].tolist()
                signal_save_path = os.path.join(lc_save_path, signal_file_name)
                np.savetxt(signal_save_path, lc_model_signal)
                #################

                ## Error Calculations
                lc_mse = lc_model_fit.mean_squared_error(lc_data)
                ##

                new_params = {}
                new_params['mse'] = lc_mse.tolist()

                for key, value in lc_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(lc_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                #####################

                ######## SL #########
                sl_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    sl_data,
                    use_parallel_processing=False,
                    solver='mix')
                sl_fitted_params = sl_model_fit.fitted_parameters
                sl_model_signal = sl_model_fit.predict()

                ## Save SL Signal
                sl_model_signal = sl_model_signal[0, :].tolist()
                signal_save_path = os.path.join(sl_save_path, signal_file_name)
                np.savetxt(signal_save_path, sl_model_signal)
                #################

                ## Error Calculations
                sl_mse = sl_model_fit.mean_squared_error(sl_data)
                ##

                new_params = {}
                new_params['mse'] = sl_mse.tolist()

                for key, value in sl_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(sl_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                ######################

                ######## VC ##########
                vc_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    vc_data,
                    use_parallel_processing=False,
                    solver='mix')
                vc_fitted_params = vc_model_fit.fitted_parameters
                vc_model_signal = vc_model_fit.predict()

                ## Save VC Signal
                vc_model_signal = vc_model_signal[0, :].tolist()
                signal_save_path = os.path.join(vc_save_path, signal_file_name)
                np.savetxt(signal_save_path, vc_model_signal)
                #################

                ## Error Calculations
                vc_mse = vc_model_fit.mean_squared_error(vc_data)
                ##

                new_params = {}
                new_params['mse'] = vc_mse.tolist()

                for key, value in vc_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(vc_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                ######################

                ######## VH #########
                vh_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    vh_data,
                    use_parallel_processing=False,
                    solver='mix')
                vh_fitted_params = vh_model_fit.fitted_parameters
                vh_model_signal = vh_model_fit.predict()

                ## Save VH Signal
                vh_model_signal = vh_model_signal[0, :].tolist()
                signal_save_path = os.path.join(vh_save_path, signal_file_name)
                np.savetxt(signal_save_path, vh_model_signal)
                #################

                ## Error Calculations
                vh_mse = vh_model_fit.mean_squared_error(vh_data)
                ##

                new_params = {}
                new_params['mse'] = vh_mse.tolist()

                for key, value in vh_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(vh_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                ######################

                print('Model Completed with Combination of {} and {} and {}'.
                      format(cyn_key, gsn_key, sph_key))

    print('All Done')
# Concatenate bvals, bvecs and data
all_bvecs = np.hstack((b1k_bvecs, b2k_bvecs))
all_bvals = np.hstack((b1k_bvals, b2k_bvals))
all_data = np.concatenate((b1k_fdata, b2k_fdata), axis=3)

# Prepare Dmipy Acquisition Scheme
# Dmipy quirk: b-values given in s/mm^2 must be multiplied
# by 1e6 to be on the SI scale of s/m^2

all_bvals = all_bvals * 1e6
all_bvecs = np.transpose(all_bvecs)

# The call below can also take the pulse timings delta and Delta.
# TODO delta and Delta are often not available for the data.
acq_scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)

# We are ready to fit models
# Prepare SMT Model
zeppelin = gaussian_models.G2Zeppelin()
smt_mod = modeling_framework.MultiCompartmentSphericalMeanModel(
    models=[zeppelin])
#smt_mod.set_fractional_parameter()

# Fit SMT
smt_fit_hcp = smt_mod.fit(acq_scheme,
                          all_data,
                          Ns=30,
                          mask=all_data[..., 0] > 0,
                          use_parallel_processing=False)
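
After the fit, the per-voxel estimates live in fitted_parameters, keyed by dmipy's <model>_<index>_<parameter> names; a short usage sketch (key names assumed from the G2Zeppelin naming convention):

smt_params = smt_fit_hcp.fitted_parameters
lambda_par_map = smt_params['G2Zeppelin_1_lambda_par']
lambda_perp_map = smt_params['G2Zeppelin_1_lambda_perp']
print(lambda_par_map.shape, lambda_perp_map.shape)  # one value per voxel
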
def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time - data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### NODDI Watson ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                  subj_data,
                                  mask=subj_data[..., 0] > 0,
                                  use_parallel_processing=True,
                                  number_of_processors=32)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = NODDI_fit_hcp.fitted_parameters

    para_Names_list = []
    for key, value in fitted_parameters.items():
        para_Names_list.append(key)

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from already Read Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter], affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
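# --- Editor's note: a minimal, self-contained sketch (not from the original
# script) that exercises the same NODDI-Watson pipeline on synthetic data.
# The scheme geometry and parameter values below are illustrative assumptions.
import numpy as np
from dmipy.core.acquisition_scheme import acquisition_scheme_from_bvalues
from dmipy.core.modeling_framework import MultiCompartmentModel
from dmipy.signal_models import cylinder_models, gaussian_models
from dmipy.distributions.distribute_models import SD1WatsonDistributed

# one b0 plus a single shell at b=1000 s/mm^2, converted to SI units (s/m^2)
bvals = np.r_[0., np.tile(1000., 30)] * 1e6
rng = np.random.RandomState(0)
bvecs = rng.randn(31, 3)
bvecs /= np.linalg.norm(bvecs, axis=1)[:, None]
scheme = acquisition_scheme_from_bvalues(bvals, bvecs, delta=0.0106, Delta=0.0431)

bundle = SD1WatsonDistributed(models=[cylinder_models.C1Stick(),
                                      gaussian_models.G2Zeppelin()])
bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                              'C1Stick_1_lambda_par', 'partial_volume_0')
bundle.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)
noddi = MultiCompartmentModel(models=[gaussian_models.G1Ball(), bundle])
noddi.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

# simulate one voxel and fit it back (parameter names as in dmipy's NODDI tutorial)
ground_truth = noddi.parameters_to_parameter_vector(
    SD1WatsonDistributed_1_SD1Watson_1_mu=[0.5, 0.5],
    SD1WatsonDistributed_1_SD1Watson_1_odi=0.3,
    SD1WatsonDistributed_1_partial_volume_0=0.6,
    partial_volume_0=0.1,
    partial_volume_1=0.9)
signal = noddi.simulate_signal(scheme, ground_truth)
noddi_fit = noddi.fit(scheme, signal)
print(noddi_fit.fitted_parameters)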
Example #23
def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'VERDICT'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                      all_bvecs,
                                                      delta=10.6 * 1e-3,
                                                      Delta=43.1 * 1e-3,
                                                      TE=89.5 * 1e-3)

    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    #axial_slice_data = subj_data[55:60, 65:70, 60:62, :]

    mask_babel_object = nib.load(
        os.path.join(subj_data_path, 'nodif_brain_mask.nii.gz'))
    mask_data = mask_babel_object.get_fdata()

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                  data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### Verdict Begin ####
    sphere = sphere_models.S4SphereGaussianPhaseApproximation(
        diffusion_constant=0.9e-9)
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()

    verdict_mod = MultiCompartmentModel(models=[sphere, ball, stick])

    verdict_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 0.9e-9)
    verdict_mod.set_parameter_optimization_bounds('C1Stick_1_lambda_par',
                                                  [3.05e-9, 10e-9])

    print('Fitting the Verdict Model ...')
    fit_start_time = time.time()
    mcdmi_fit = verdict_mod.fit(subj_Acq_Scheme,
                                subj_data,
                                mask=mask_data,
                                solver='mix',
                                use_parallel_processing=True,
                                number_of_processors=64)

    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(
        fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = mcdmi_fit.fitted_parameters

    # Get List of Estimated Parameter Names
    para_Names_list = list(fitted_parameters.keys())

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from already Read Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                  affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
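# --- Editor's note: a short, self-contained sketch (not from the original
# script) for inspecting the custom Stick bound set above. dmipy keeps the
# per-parameter optimization ranges in `parameter_ranges`; note these are
# stored relative to `parameter_scales` (so diffusivity bounds may appear
# divided by 1e-9) -- an assumption worth verifying on your dmipy version.
from dmipy.core.modeling_framework import MultiCompartmentModel
from dmipy.signal_models import sphere_models, gaussian_models, cylinder_models

verdict_demo = MultiCompartmentModel(models=[
    sphere_models.S4SphereGaussianPhaseApproximation(diffusion_constant=0.9e-9),
    gaussian_models.G1Ball(),
    cylinder_models.C1Stick()])
verdict_demo.set_parameter_optimization_bounds('C1Stick_1_lambda_par',
                                               [3.05e-9, 10e-9])
print(verdict_demo.parameter_ranges['C1Stick_1_lambda_par'])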
Example #24
for iMask in range(len(allMaskNames)):
    print("Processing subject", iMask)

    print("Loading")

    gradient_directions = np.loadtxt(allBvecsNames[iMask])  # on the unit sphere

    if gradient_directions.shape[1] == 3:
        gradient_directions_normalized = normalized_vector(gradient_directions)
    else:
        gradient_directions_normalized = normalized_vector(gradient_directions.T)
    # replace NaN entries (b0 volumes with zero gradient) by an arbitrary unit vector
    gradient_directions_normalized[np.isnan(gradient_directions_normalized)] = 1.0 / np.sqrt(3)

    bvalues = np.loadtxt(allBvalsNames[iMask])  # given in s/mm^2
    bvalues_SI = bvalues * 1e6  # convert to SI units, s/m^2
    acq_scheme = acquisition_scheme_from_bvalues(bvalues_SI, gradient_directions_normalized, delta, Delta)
    # gtab_dipy = gradient_table(bvalues, gradient_directions, big_delta=Delta, small_delta=delta, atol=3e-2)
    # acq_scheme = gtab_dipy2mipy(gtab_dipy)

    acq_scheme.print_acquisition_info

    dwi_nii = nib.load(allDwiNames[iMask])
    dwi = dwi_nii.get_fdata()
    mask = nib.load(allMaskNames[iMask]).get_fdata()

    ball = gaussian_models.G1Ball()
    cylinder = cylinder_models.C4CylinderGaussianPhaseApproximation()
    gamma_cylinder = distribute_models.DD1GammaDistributed(models=[cylinder])

    axcaliber_gamma = MultiCompartmentModel(models=[ball, gamma_cylinder])
    print(axcaliber_gamma.parameter_cardinality)
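# --- Editor's note: illustrative aside (not from the original snippet) on what
# `parameter_cardinality` reports: the number of values each free parameter
# takes per voxel, e.g. an orientation 'mu' has cardinality 2 (two spherical
# angles) while a scalar diffusivity has cardinality 1.
from dmipy.core.modeling_framework import MultiCompartmentModel
from dmipy.signal_models import gaussian_models, cylinder_models

demo_model = MultiCompartmentModel(models=[gaussian_models.G1Ball(),
                                           cylinder_models.C1Stick()])
print(demo_model.parameter_cardinality)
# e.g. OrderedDict([('G1Ball_1_lambda_iso', 1), ('C1Stick_1_mu', 2),
#                   ('C1Stick_1_lambda_par', 1), ('partial_volume_0', 1),
#                   ('partial_volume_1', 1)])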
def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'IVIM'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)
    #base_data_path = args.input_path

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    axial_slice_data = subj_data[:, :, 25:27, :]

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time - data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### IVIM ####
    print('Fitting IVIM ...')
    fit_start_time = time.time()
    ivim_fit_dmipy_fixed = ivim_Dstar_fixed(subj_Acq_Scheme, subj_data, mask=subj_data[..., 0] > 0)
    fit_end_time = time.time()
    print('IVIM Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    # Get List of Estimated Parameter Names
    fitted_parameters = ivim_fit_dmipy_fixed.fitted_parameters
    para_Names_list = list(fitted_parameters.keys())

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from already Read Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter], affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
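# --- Editor's note: the script above assumes `ivim_Dstar_fixed` is already in
# scope; in dmipy it is shipped as a custom optimizer (import path taken from
# dmipy's IVIM example and stated here as an assumption):
#
#     from dmipy.custom_optimizers.intra_voxel_incoherent_motion import (
#         ivim_Dstar_fixed)
#
# It fixes the pseudo-diffusion coefficient D* to a literature value, so the
# fit only estimates S0, the perfusion fraction and the tissue diffusivity.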
def fit_noddi_dmipy(input_dwi,
                    input_bval,
                    input_bvec,
                    input_mask,
                    output_dir,
                    nthreads=1,
                    solver='brute2fine',
                    model_type='WATSON',
                    parallel_diffusivity=1.7e-9,
                    iso_diffusivity=3e-9,
                    bids_fmt=False,
                    bids_id=''):

    import nibabel as nib
    from dmipy.signal_models import cylinder_models, gaussian_models
    from dmipy.distributions.distribute_models import SD1WatsonDistributed, SD2BinghamDistributed
    from dmipy.core.modeling_framework import MultiCompartmentModel
    from dmipy.core import modeling_framework
    from dmipy.core.acquisition_scheme import acquisition_scheme_from_bvalues
    from dipy.io import read_bvals_bvecs

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    #Setup the acquisition scheme
    bvals, bvecs = read_bvals_bvecs(input_bval, input_bvec)
    bvals_SI = bvals * 1e6
    acq_scheme = acquisition_scheme_from_bvalues(bvals_SI, bvecs)
    acq_scheme.print_acquisition_info

    # Load the data
    img = nib.load(input_dwi)
    data = img.get_fdata()

    # Load the mask into its own image so the DWI affine/header remain available
    mask_img = nib.load(input_mask)
    mask_data = mask_img.get_fdata()

    ball = gaussian_models.G1Ball()  #CSF
    stick = cylinder_models.C1Stick()  #Intra-axonal diffusion
    zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])
    else:
        dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                            'C1Stick_1_lambda_par',
                                            'partial_volume_0')
    dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                         'C1Stick_1_lambda_par')
    dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                         parallel_diffusivity)

    NODDI_mod = MultiCompartmentModel(models=[ball, dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', iso_diffusivity)
    NODDI_fit = NODDI_mod.fit(acq_scheme,
                              data,
                              mask=mask_data,
                              number_of_processors=nthreads,
                              solver=solver)

    fitted_parameters = NODDI_fit.fitted_parameters

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD2BinghamDistributed_1_SD2Bingham_1_odi']

    else:
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']

    if bids_fmt:
        output_odi = output_dir + '/' + bids_id + '_model-NODDI_parameter-ODI.nii.gz'
        output_vf_intra = output_dir + '/' + bids_id + '_model-NODDI_parameter-ICVF.nii.gz'
        output_vf_extra = output_dir + '/' + bids_id + '_model-NODDI_parameter-EXVF.nii.gz'
        output_vf_iso = output_dir + '/' + bids_id + '_model-NODDI_parameter-ISO.nii.gz'
    else:
        output_odi = output_dir + '/noddi_ODI.nii.gz'
        output_vf_intra = output_dir + '/noddi_ICVF.nii.gz'
        output_vf_extra = output_dir + '/noddi_EXVF.nii.gz'
        output_vf_iso = output_dir + '/noddi_ISO.nii.gz'

    # Save the images, propagating the DWI affine and header
    odi_img = nib.Nifti1Image(odi, img.affine, img.header)
    odi_img.set_sform(img.get_sform())
    odi_img.set_qform(img.get_qform())
    nib.save(odi_img, output_odi)

    icvf_img = nib.Nifti1Image(vf_intra, img.affine, img.header)
    icvf_img.set_sform(img.get_sform())
    icvf_img.set_qform(img.get_qform())
    nib.save(icvf_img, output_vf_intra)

    ecvf_img = nib.Nifti1Image(vf_extra, img.affine, img.header)
    ecvf_img.set_sform(img.get_sform())
    ecvf_img.set_qform(img.get_qform())
    nib.save(ecvf_img, output_vf_extra)

    iso_img = nib.Nifti1Image(vf_iso, img.affine, img.header)
    iso_img.set_sform(img.get_sform())
    iso_img.set_qform(img.get_qform())
    nib.save(iso_img, output_vf_iso)
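# --- Editor's note: a hypothetical invocation of fit_noddi_dmipy; the file
# names below are placeholders, not paths from the original code.
if __name__ == '__main__':
    fit_noddi_dmipy(input_dwi='sub-01_dwi.nii.gz',
                    input_bval='sub-01_dwi.bval',
                    input_bvec='sub-01_dwi.bvec',
                    input_mask='sub-01_brainmask.nii.gz',
                    output_dir='noddi_out',
                    nthreads=4,
                    solver='brute2fine',
                    model_type='WATSON')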
Example #27
def main():

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['125525', '118225', '116726']

    # TODO When needed loop here over the ID list
    subj_data_path = os.path.join(base_data_path, subj_ID_List[0], 'T1w',
                                  'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    axial_slice_data = subj_data[:, :, 65, :]

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                  data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    # DMIPY Model Stuff
    '''
    #### Ball & Stick ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    BAS_mod = MultiCompartmentModel(models=[stick, ball])

    print('Fitting the Ball & Stick Model ...')
    fit_start_time = time.time()
    BAS_fit_hcp = BAS_mod.fit(subj_Acq_Scheme, axial_slice_data, mask=axial_slice_data[..., 0] > 0)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = BAS_fit_hcp.fitted_parameters

    fig, axs = plt.subplots(2, 2, figsize=[10, 10])
    axs = axs.ravel()

    counter = 0
    for name, values in fitted_parameters.items():
        if values.squeeze().ndim != 2:
            continue
        cf = axs[counter].imshow(values.squeeze().T, origin=True, interpolation='nearest')
        axs[counter].set_title(name)
        axs[counter].set_axis_off()
        fig.colorbar(cf, ax=axs[counter], shrink=0.8)
        counter += 1

    bs_plt_name = 'ball_stick_behrens_{}.png'.format(fit_time)
    plt.savefig(os.path.join(base_plot_path, bs_plt_name))
    plt.clf()
    #### End of Ball & Stick ####
    '''
    '''
    #### Ball & Racket ####

    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    dispersed_stick = SD2BinghamDistributed([stick])
    BAR_mod = MultiCompartmentModel(models=[dispersed_stick, ball])

    # Parameter Fixing makes the model run faster
    BAR_mod.set_fixed_parameter("SD2BinghamDistributed_1_C1Stick_1_lambda_par", 1.7e-9)

    print('Fitting the Ball & Racket Model ...')
    fit_start_time = time.time()
    BAR_fit_hcp = BAR_mod.fit(subj_Acq_Scheme, axial_slice_data, mask=axial_slice_data[..., 0] > 0)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = BAR_fit_hcp.fitted_parameters

    fig, axs = plt.subplots(2, 3, figsize=[15, 10])
    axs = axs.ravel()

    counter = 0
    for name, values in fitted_parameters.items():
        if values.squeeze().ndim != 2:
            continue
        cf = axs[counter].imshow(values.squeeze().T, origin=True, interpolation='nearest')
        axs[counter].set_title(name)
        fig.colorbar(cf, ax=axs[counter], shrink=0.5)
        counter += 1

    br_plt_name = 'ball_racket_{}.png'.format(fit_time)
    plt.savefig(os.path.join(base_plot_path, br_plt_name))
    plt.clf()

    #### End of Ball & Racket Stuff ####
    '''
    '''
    #### IVIM ####
    print('Fitting IVIM ...')
    fit_start_time = time.time()
    ivim_fit_dmipy_fixed = ivim_Dstar_fixed(subj_Acq_Scheme, axial_slice_data, mask=axial_slice_data[..., 0] > 0)
    fit_end_time = time.time()
    print('IVIM Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fig, axs = plt.subplots(nrows=2, ncols=2, figsize=[10, 10])
    axs = axs.ravel()
    axs[0].set_title('Dmipy Dstar-Fixed', fontsize=18)
    axs[0].set_ylabel('S0-Predicted', fontsize=15)
    axs[1].set_ylabel('perfusion fraction', fontsize=15)
    axs[2].set_ylabel('D_star (perfusion)', fontsize=15)
    axs[3].set_ylabel('D (diffusion)', fontsize=15)

    args = {'vmin': 0., 'interpolation': 'nearest'}
    im0 = axs[0].imshow(ivim_fit_dmipy_fixed.S0, **args)
    im1 = axs[1].imshow(ivim_fit_dmipy_fixed.fitted_parameters['partial_volume_1'], vmax=1., **args)
    im2 = axs[2].imshow(np.ones_like(ivim_fit_dmipy_fixed.S0) *
                        ivim_fit_dmipy_fixed.fitted_and_linked_parameters['G1Ball_2_lambda_iso'] * 1e9, vmax=20, **args)
    axs[2].text(10, 10, 'Fixed to 7e-9 mm$^2$/s', fontsize=14, color='white')
    im3 = axs[3].imshow(ivim_fit_dmipy_fixed.fitted_parameters['G1Ball_1_lambda_iso'] * 1e9, vmax=6, **args)

    for im, ax in zip([im0, im1, im2, im3], axs):
        fig.colorbar(im, ax=ax, shrink=0.7)

    ivim_plt_name = 'ivim_{}.png'.format(fit_time)
    plt.savefig(os.path.join(base_plot_path, ivim_plt_name))
    plt.clf()

    #### End of IVIM ####
    '''

    ## TODO Investigate this bug, dimension error while fitting response functions
    #### MSMT-CSD ####
    S0_tissue_responses, tissue_response_models, selection_map = three_tissue_response_dhollander16(
        subj_Acq_Scheme,
        axial_slice_data,
        wm_algorithm='tournier13',
        wm_N_candidate_voxels=150,
        gm_perc=0.2,
        csf_perc=0.4)
    TR2_wm, TR1_gm, TR1_csf = tissue_response_models
    S0_wm, S0_gm, S0_csf = S0_tissue_responses

    fig, axs = plt.subplots(nrows=2, ncols=2, figsize=[10, 10])

    axs = axs.ravel()
    axs[0].set_title('Tissue response selection map', fontsize=18)

    args = {'vmin': 0., 'interpolation': 'nearest'}
    im0 = axs[0].imshow(axial_slice_data[:, :, 0, 0], origin='lower')
    im0 = axs[0].imshow(selection_map.squeeze(), origin='lower', alpha=0.8)

    plt.show()

    #### End of MSMT-CSD ####

    #### MC MDI CSD ####

    print('Debug here')
    return None
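# --- Editor's note: a small illustrative check (not from the original script)
# on the dhollander16 output; it assumes `selection_map` from the main() above,
# which marks the voxels selected as tissue-response candidates, so counting
# its labels gives the number of candidate voxels per class (integer labelling
# assumed).
import numpy as np

labels, counts = np.unique(selection_map, return_counts=True)
print(dict(zip(labels.tolist(), counts.tolist())))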
Example #28
def main():

    # Define Base Data Paths here
    base_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/SingleVoxelSignals/SingleVoxelSignals'
    base_data_path = os.path.normpath(base_data_path)

    # Define Saving Paths here
    save_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/results/intra_extra'
    save_data_path = os.path.normpath(save_data_path)
    if not os.path.exists(save_data_path):
        os.mkdir(save_data_path)

    # Scheme and The Directions file Paths
    scheme_path = os.path.join(base_data_path, 'scheme.scheme')
    bvecs_path = os.path.join(base_data_path, 'BVECS.bvec')
    bvals_path = os.path.join(base_data_path, 'BVALS.bval')

    # Voxel Paths
    voxel_fc_path = os.path.join(base_data_path, 'FasciulusCuneatus.txt')
    voxel_lc_path = os.path.join(base_data_path, 'LateralCST.txt')
    voxel_sl_path = os.path.join(base_data_path, 'SpinalLemniscus.txt')
    voxel_vc_path = os.path.join(base_data_path, 'VentralCST.txt')
    voxel_vh_path = os.path.join(base_data_path, 'VentralHorn.txt')

    # Reading the Scheme and the Directions
    scheme_data = np.loadtxt(scheme_path)
    bvecs_data = np.loadtxt(bvecs_path)
    bvals_data = np.loadtxt(bvals_path)

    # Read the voxel Data
    # Read the voxel data; each text file holds comma-separated signal values
    def read_voxel_csv(csv_path):
        rows = []
        with open(csv_path) as csvfile:
            readcsv = csv.reader(csvfile, delimiter=',')
            for row in readcsv:
                rows.append(row)
        return np.asarray(rows, dtype='float32')

    fc_data = read_voxel_csv(voxel_fc_path)
    print('FC Voxel Shape: {}'.format(fc_data.shape))

    lc_data = read_voxel_csv(voxel_lc_path)
    print('LC Voxel Shape: {}'.format(lc_data.shape))

    sl_data = read_voxel_csv(voxel_sl_path)
    print('SL Voxel Shape: {}'.format(sl_data.shape))

    vc_data = read_voxel_csv(voxel_vc_path)
    print('VC Voxel Shape: {}'.format(vc_data.shape))

    vh_data = read_voxel_csv(voxel_vh_path)
    print('VH Voxel Shape: {}'.format(vh_data.shape))

    print('All Data Loaded ...')

    print('Constructing Acquisition Schemes')
    all_bvals = bvals_data * 1e6
    all_bvecs = np.transpose(bvecs_data)

    little_delta = scheme_data[:, 0]
    big_delta = scheme_data[:, 1]
    t_e = scheme_data[:, 4]

    Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                 all_bvecs,
                                                 delta=little_delta * 1e-3,
                                                 Delta=big_delta * 1e-3,
                                                 TE=t_e * 1e-3)

    cylinder_dict = {
        'C1': cyn.C1Stick,
        'C2': cyn.C2CylinderStejskalTannerApproximation,
        'C3': cyn.C3CylinderCallaghanApproximation,
        'C4': cyn.C4CylinderGaussianPhaseApproximation
    }

    gaussian_dict = {
        'G1': gsn.G1Ball,
        'G2': gsn.G2Zeppelin,
        'G3': gsn.G3TemporalZeppelin
    }

    # Create one saving directory per region
    region_save_paths = {}
    for region in ['FC', 'LC', 'SL', 'VC', 'VH']:
        region_path = os.path.join(save_data_path, region)
        if not os.path.exists(region_path):
            os.mkdir(region_path)
        region_save_paths[region] = region_path

    # The same grid of cylinder/gaussian combinations is fit to every region;
    # the five repeated per-region blocks are collapsed into one loop.
    region_data = {
        'FC': fc_data,
        'LC': lc_data,
        'SL': sl_data,
        'VC': vc_data,
        'VH': vh_data
    }

    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():

            cylinder = cyn_val()
            gaussian = gsn_val()

            multi_compat_model = MultiCompartmentModel(
                models=[cylinder, gaussian])

            for region, data in region_data.items():
                model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    data,
                    use_parallel_processing=False,
                    solver='mix')

                # Fitted parameters plus error metrics, saved as JSON
                new_params = {
                    'mse': model_fit.mean_squared_error(data).tolist(),
                    'R2': model_fit.R2_coefficient_of_determination(data).tolist()
                }
                for key, value in model_fit.fitted_parameters.items():
                    new_params[key] = value.tolist()

                model_file_name = cyn_key + '_' + gsn_key + '.json'
                model_save_path = os.path.join(region_save_paths[region],
                                               model_file_name)
                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)

            print('Model Completed with Combination of {} and {}'.format(
                cyn_key, gsn_key))

    print('All Done')

    return None
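# --- Editor's note: a small hypothetical helper (not in the original) for
# ranking the per-region JSON files written by the main() above by mean R2.
import json
import os
import numpy as np

def rank_fits_by_R2(region_dir):
    scores = {}
    for fname in os.listdir(region_dir):
        if fname.endswith('.json'):
            with open(os.path.join(region_dir, fname)) as f:
                # 'R2' was stored with .tolist(), so it may be a scalar or a
                # per-voxel list; np.mean handles both
                scores[fname] = float(np.mean(json.load(f)['R2']))
    return sorted(scores.items(), key=lambda kv: kv[1], reverse=True)

# e.g. rank_fits_by_R2('/nfs/masi/nathv/spinal_cord_data_2020/results/intra_extra/FC')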
def main():

    # Base Path of all given files for All models are wrong
    base_path = r'/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/'
    base_path = os.path.normpath(base_path)

    # Just dealing with PGSE for now
    pgse_acq_params_path = os.path.join(base_path, 'PGSE_AcqParams.txt')
    pgse_signal_path = os.path.join(base_path, 'PGSE_Simulations.txt')

    # Read files via Numpy
    pgse_acq_params = np.loadtxt(pgse_acq_params_path)
    pgse_signal_data = np.loadtxt(pgse_signal_path)
    pgse_example_sub_diff = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/DIffusivity-ExampleSubmission3/PGSE.txt'
    )
    pgse_example_sub_volfrac = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/VolumeFraction-ExampleSubmission3/PGSE.txt'
    )

    # Transpose the Signal data
    pgse_signal_data = pgse_signal_data.transpose()

    # Dissect the acquisition parameters to form the Acquisition Table
    bvecs = pgse_acq_params[:, 1:4]
    bvals = pgse_acq_params[:, 6] * 1e6
    grad_str = pgse_acq_params[:, 0]
    small_del = pgse_acq_params[:, 4]
    big_del = pgse_acq_params[:, 5]

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(bvals,
                                                      bvecs,
                                                      delta=small_del,
                                                      Delta=big_del)

    subj_Acq_Scheme.print_acquisition_info

    #### NODDI Bingham ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    bingham_dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])

    bingham_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                    'C1Stick_1_lambda_par',
                                                    'partial_volume_0')
    bingham_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                 'C1Stick_1_lambda_par')
    bingham_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                 1.7e-9)

    NODDI_bingham_mod = MultiCompartmentModel(
        models=[ball, bingham_dispersed_bundle])
    NODDI_bingham_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_bingham_mod.fit(subj_Acq_Scheme,
                                          pgse_signal_data,
                                          use_parallel_processing=True,
                                          number_of_processors=8)

    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(
        fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    sub_1_pv0 = NODDI_fit_hcp.fitted_parameters['partial_volume_0']
    sub_2_pv1 = NODDI_fit_hcp.fitted_parameters['partial_volume_1']

    np.savetxt('noddi_bingham_pv0.txt', sub_1_pv0)
    np.savetxt('noddi_bingham_pv1.txt', sub_2_pv1)

    print('Debug here')

    return None
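# --- Editor's note: illustrative bookkeeping fragment (not from the original
# script), mirroring the volume-fraction arithmetic in fit_noddi_dmipy above;
# it assumes NODDI_fit_hcp from the main() directly above. `partial_volume_0`
# is the ball (CSF) fraction and `partial_volume_1` the dispersed-bundle
# fraction, so the intra- and extra-axonal fractions factorize as:
fp = NODDI_fit_hcp.fitted_parameters
vf_intra = (fp['SD2BinghamDistributed_1_partial_volume_0']
            * fp['partial_volume_1'])
vf_extra = ((1 - fp['SD2BinghamDistributed_1_partial_volume_0'])
            * fp['partial_volume_1'])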
Example #30
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'AXCALIBER'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['125525', '118225', '116726', '115825', '115017', '114823']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                          all_bvecs,
                                                          delta=10.6 * 1e-3,
                                                          Delta=43.1 * 1e-3,
                                                          TE=89.5 * 1e-3)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[58:60, 68:70, 60:62, :]

        mask_babel_object = nib.load(
            os.path.join(subj_data_path, 'nodif_brain_mask.nii.gz'))
        mask_data = mask_babel_object.get_fdata()
        axial_mask_slice_data = mask_data[58:60, 68:70, 60:62]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### AxCaliber Begin ####
        ball = gaussian_models.G1Ball()
        cylinder = cylinder_models.C4CylinderGaussianPhaseApproximation()
        gamma_cylinder = distribute_models.DD1GammaDistributed(
            models=[cylinder])

        axcaliber_gamma = modeling_framework.MultiCompartmentModel(
            models=[ball, gamma_cylinder])

        axcaliber_gamma.set_fixed_parameter(
            'DD1GammaDistributed_1_C4CylinderGaussianPhaseApproximation_1_lambda_par',
            1.7e-9)
        axcaliber_gamma.set_fixed_parameter(
            'DD1GammaDistributed_1_C4CylinderGaussianPhaseApproximation_1_mu',
            [0, 0])

        print('Fitting the AxCaliber Model ...')
        fit_start_time = time.time()
        mcdmi_fit = axcaliber_gamma.fit(subj_Acq_Scheme,
                                        axial_slice_data,
                                        mask=axial_mask_slice_data,
                                        solver='mix',
                                        maxiter=100,
                                        use_parallel_processing=True,
                                        number_of_processors=32)

        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = mcdmi_fit.fitted_parameters

        # Get List of Estimated Parameter Names
        para_Names_list = list(fitted_parameters.keys())

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

        print('debug here')
    return None