Example #1
def create_noddi_watson_model(lambda_iso_diff=3.e-9, lambda_par_diff=1.7e-9):
    """Creates NODDI mulit-compartment model with Watson distribution."""
    """
        Arguments:
            lambda_iso_diff: float
                isotropic diffusivity
            lambda_par_diff: float
                parallel diffusivity
        Returns: MultiCompartmentModel instance
            NODDI Watson multi-compartment model instance
    """
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])
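    # Tortuosity ties the zeppelin's perpendicular diffusivity to the stick
    # fraction (lambda_perp = (1 - vf_intra) * lambda_par in NODDI), while the
    # parallel diffusivities of stick and zeppelin are set equal and fixed.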
    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                   'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                lambda_par_diff)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', lambda_iso_diff)

    return NODDI_mod
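
A minimal usage sketch for the factory above (hedged: the acquisition-scheme construction and the bvals/bvecs/signal_data arrays are assumptions, mirroring the other examples on this page):

from dmipy.core.acquisition_scheme import acquisition_scheme_from_bvalues

# b-values converted from s/mm^2 to SI units (s/m^2), as done throughout this page
scheme = acquisition_scheme_from_bvalues(bvals * 1e6, bvecs)
noddi_model = create_noddi_watson_model()
noddi_fit = noddi_model.fit(scheme, signal_data)
odi = noddi_fit.fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']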

Example #2

def main():

    # Base path of all provided files for the 'All Models Are Wrong' challenge
    base_path = r'/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/'
    base_path = os.path.normpath(base_path)

    # Just dealing with PGSE for now
    pgse_acq_params_path = os.path.join(base_path, 'PGSE_AcqParams.txt')
    pgse_signal_path = os.path.join(base_path, 'PGSE_Simulations.txt')

    # Read files via Numpy
    pgse_acq_params = np.loadtxt(pgse_acq_params_path)
    pgse_signal_data = np.loadtxt(pgse_signal_path)
    pgse_example_sub_diff = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/DIffusivity-ExampleSubmission3/PGSE.txt'
    )
    pgse_example_sub_volfrac = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/VolumeFraction-ExampleSubmission3/PGSE.txt'
    )

    # Transpose the Signal data
    pgse_signal_data = pgse_signal_data.transpose()

    # Dissect the acquisition parameters to form the Acquisition Table
    bvecs = pgse_acq_params[:, 1:4]
    bvals = pgse_acq_params[:, 6] * 1e6
    grad_str = pgse_acq_params[:, 0]
    small_del = pgse_acq_params[:, 4]
    big_del = pgse_acq_params[:, 5]

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(bvals,
                                                      bvecs,
                                                      delta=small_del,
                                                      Delta=big_del)
    subj_Acq_Scheme.print_acquisition_info  # property; prints the scheme summary when accessed

    #### NODDI Watson ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                   'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                1.7e-9)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                  pgse_signal_data,
                                  use_parallel_processing=True,
                                  number_of_processors=8)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(
        fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

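    # With models=[ball, bundle], partial_volume_0 is the isotropic ball
    # (free-water) fraction and partial_volume_1 the Watson bundle fraction,
    # matching the vf_iso usage in the fit_noddi_dmipy example further down.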
    sub_1_pv0 = NODDI_fit_hcp.fitted_parameters['partial_volume_0']
    sub_2_pv1 = NODDI_fit_hcp.fitted_parameters['partial_volume_1']

    np.savetxt('noddi_watson_pv0.txt', sub_1_pv0)
    np.savetxt('noddi_watson_pv1.txt', sub_2_pv1)


    return None

Example #3

def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
    subj_Acq_Scheme.print_acquisition_info  # property; prints the scheme summary when accessed

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time - data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### NODDI Watson ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                  subj_data,
                                  mask=subj_data[..., 0] > 0,
                                  use_parallel_processing=True,
                                  number_of_processors=32)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = NODDI_fit_hcp.fitted_parameters

    para_Names_list = list(fitted_parameters.keys())

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from already Read Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter], affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
Example #4
def main():

    # Define Base Data Paths here
    base_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/SingleVoxelSignals_norm/SingleVoxelSignals_norm'
    base_data_path = os.path.normpath(base_data_path)

    # Define Saving Paths here
    save_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/results_norm/intra_extra_rest_watson'
    save_data_path = os.path.normpath(save_data_path)
    if not os.path.exists(save_data_path):
        os.mkdir(save_data_path)

    # Scheme and The Directions file Paths
    scheme_path = os.path.join(base_data_path, 'scheme.scheme')
    bvecs_path = os.path.join(base_data_path, 'BVECS.bvec')
    bvals_path = os.path.join(base_data_path, 'BVALS.bval')

    # Voxel Paths
    voxel_fc_path = os.path.join(base_data_path, 'FasciulusCuneatus.txt')
    voxel_lc_path = os.path.join(base_data_path, 'LateralCST.txt')
    voxel_sl_path = os.path.join(base_data_path, 'SpinalLemniscus.txt')
    voxel_vc_path = os.path.join(base_data_path, 'VentralCST.txt')
    voxel_vh_path = os.path.join(base_data_path, 'VentralHorn.txt')

    # Reading the Scheme and the Directions
    scheme_data = np.loadtxt(scheme_path)
    bvecs_data = np.loadtxt(bvecs_path)
    bvals_data = np.loadtxt(bvals_path)

    # Read the voxel data (comma-separated signal text files)
    def load_voxel_csv(path):
        with open(path) as csvfile:
            rows = list(csv.reader(csvfile, delimiter=','))
        return np.asarray(rows, dtype='float32')

    fc_data = load_voxel_csv(voxel_fc_path)
    print('FC Voxel Shape: {}'.format(fc_data.shape))

    lc_data = load_voxel_csv(voxel_lc_path)
    print('LC Voxel Shape: {}'.format(lc_data.shape))

    sl_data = load_voxel_csv(voxel_sl_path)
    print('SL Voxel Shape: {}'.format(sl_data.shape))

    vc_data = load_voxel_csv(voxel_vc_path)
    print('VC Voxel Shape: {}'.format(vc_data.shape))

    vh_data = load_voxel_csv(voxel_vh_path)
    print('VH Voxel Shape: {}'.format(vh_data.shape))

    print('All Data Loaded ...')

    print('Constructing Acquisition Schemes')
    all_bvals = bvals_data * 1e6
    all_bvecs = np.transpose(bvecs_data)

    little_delta = scheme_data[:, 0]
    big_delta = scheme_data[:, 1]
    t_e = scheme_data[:, 4]  # echo time (not used below)

    Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                 all_bvecs,
                                                 delta=little_delta * 1e-3,
                                                 Delta=big_delta * 1e-3)
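
    # A hedged aside: dmipy expects b-values in s/m^2 and delta/Delta in
    # seconds, hence the 1e6 and 1e-3 factors above; accessing this property
    # prints a per-shell summary of the scheme as a quick sanity check.
    Acq_Scheme.print_acquisition_info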

    cylinder_dict = {
        'C1': cyn.C1Stick,
        'C2': cyn.C2CylinderStejskalTannerApproximation,
        'C3': cyn.C3CylinderCallaghanApproximation,
        'C4': cyn.C4CylinderGaussianPhaseApproximation
    }

    gaussian_dict = {'G1': gsn.G1Ball, 'G2': gsn.G2Zeppelin}

    sphere_dict = {
        'S1': sph.S1Dot,
        'S2': sph.S2SphereStejskalTannerApproximation,
        'S4': sph.S4SphereGaussianPhaseApproximation
    }

    # Per-region saving paths (FC, LC, SL, VC, VH)
    region_data = {'FC': fc_data, 'LC': lc_data, 'SL': sl_data,
                   'VC': vc_data, 'VH': vh_data}
    region_save_paths = {}
    for region in region_data:
        region_path = os.path.join(save_data_path, region)
        if not os.path.exists(region_path):
            os.mkdir(region_path)
        region_save_paths[region] = region_path

    #TODO Double Combinations of Intra and Extra.
    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():

            # File name
            model_file_name = cyn_key + '_' + gsn_key + '_watson.json'
            signal_file_name = cyn_key + '_' + gsn_key + '_watson' + '_signal.txt'

            cylinder = cyn_val()
            gaussian = gsn_val()

            # Disperse the cylinder (intra-cellular) and Gaussian
            # (extra-cellular) models with a Watson distribution
            watson_dispersed_intra = SD1WatsonDistributed(
                models=[cylinder, gaussian])
            multi_compat_model = MultiCompartmentModel(
                models=[watson_dispersed_intra])

            # TODO If more than one 'mu' parameter exists, the model has
            # multiple orientation parameters; identify them and set them
            # equal to each other.
            mu_list = []
            for each_para_name in multi_compat_model.parameter_names:
                # Last two characters of the parameter name
                mu_type = each_para_name[-2:]
                if mu_type == 'mu':
                    mu_list.append(each_para_name)
                    #multi_compat_model.set_fixed_parameter(each_para_name, 1.7e-9)

            if len(mu_list) == 2:
                multi_compat_model.set_equal_parameter(mu_list[0], mu_list[1])
            # End of mu conditions

            print(multi_compat_model.parameter_names)

            # Fit and save per region (FC, LC, SL, VC, VH)
            for region, data in region_data.items():
                model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    data,
                    use_parallel_processing=False,
                    solver='mix')
                fitted_params = model_fit.fitted_parameters
                model_signal = model_fit.predict()

                # Save the predicted signal
                signal_save_path = os.path.join(region_save_paths[region],
                                                signal_file_name)
                np.savetxt(signal_save_path, model_signal[0, :].tolist())

                # Error calculations
                mse = model_fit.mean_squared_error(data)

                new_params = {'mse': mse.tolist()}
                for key, value in fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(region_save_paths[region],
                                               model_file_name)
                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)

            print('Model Completed with Combination of {} and {}'.format(
                cyn_key, gsn_key))

    # TODO Triple Combinations of Intra, Extra and Rest
    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():
            for sph_key, sph_val in sphere_dict.items():

                cylinder = cyn_val()
                gaussian = gsn_val()
                sphere = sph_val()

                # Disperse the cylinder (intra-cellular) and Gaussian
                # (extra-cellular) models with a Watson distribution
                watson_dispersed_intra = SD1WatsonDistributed(
                    models=[cylinder, gaussian])
                multi_compat_model = MultiCompartmentModel(
                    models=[watson_dispersed_intra, sphere])

                print(multi_compat_model.parameter_names)

                # TODO If more than one 'mu' parameter exists, the model has
                # multiple orientation parameters; identify them and set them
                # equal to each other.
                mu_list = []
                for each_para_name in multi_compat_model.parameter_names:
                    # Last two characters of the parameter name
                    mu_type = each_para_name[-2:]
                    if mu_type == 'mu':
                        mu_list.append(each_para_name)
                        # multi_compat_model.set_fixed_parameter(each_para_name, 1.7e-9)

                if len(mu_list) == 2:
                    multi_compat_model.set_equal_parameter(
                        mu_list[0], mu_list[1])
                # End of mu conditions

                # This file name is common to all voxels and describes the nomenclature
                # as the selection of models that were used based on the three components
                model_file_name = cyn_key + '_' + gsn_key + '_' + sph_key + '_watson.json'
                signal_file_name = cyn_key + '_' + gsn_key + '_' + sph_key + '_watson' + '_signal.txt'

                # Fit and save per region (FC, LC, SL, VC, VH)
                for region, data in region_data.items():
                    model_fit = multi_compat_model.fit(
                        Acq_Scheme,
                        data,
                        use_parallel_processing=False,
                        solver='mix')
                    fitted_params = model_fit.fitted_parameters
                    model_signal = model_fit.predict()

                    # Save the predicted signal
                    signal_save_path = os.path.join(region_save_paths[region],
                                                    signal_file_name)
                    np.savetxt(signal_save_path, model_signal[0, :].tolist())

                    # Error calculations
                    mse = model_fit.mean_squared_error(data)

                    new_params = {'mse': mse.tolist()}
                    for key, value in fitted_params.items():
                        new_params[key] = value.tolist()

                    model_save_path = os.path.join(region_save_paths[region],
                                                   model_file_name)
                    with open(model_save_path, 'w') as json_file:
                        json.dump(new_params, json_file)

                print('Model Completed with Combination of {}, {} and {}'.
                      format(cyn_key, gsn_key, sph_key))

    print('All Done')

Example #5

def fit_noddi_dmipy(input_dwi,
                    input_bval,
                    input_bvec,
                    input_mask,
                    output_dir,
                    nthreads=1,
                    solver='brute2fine',
                    model_type='WATSON',
                    parallel_diffusivity=1.7e-9,
                    iso_diffusivity=3e-9,
                    bids_fmt=False,
                    bids_id=''):

    import nibabel as nib
    from dmipy.signal_models import cylinder_models, gaussian_models
    from dmipy.distributions.distribute_models import SD1WatsonDistributed, SD2BinghamDistributed
    from dmipy.core.modeling_framework import MultiCompartmentModel
    from dmipy.core import modeling_framework
    from dmipy.core.acquisition_scheme import acquisition_scheme_from_bvalues
    from dipy.io import read_bvals_bvecs

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    #Setup the acquisition scheme
    bvals, bvecs = read_bvals_bvecs(input_bval, input_bvec)
    bvals_SI = bvals * 1e6
    acq_scheme = acquisition_scheme_from_bvalues(bvals_SI, bvecs)
    acq_scheme.print_acquisition_info

    #Load the data
    dwi_img = nib.load(input_dwi)
    data = dwi_img.get_fdata()

    #Load the mask (separate variable so the DWI image is not overwritten)
    mask_img = nib.load(input_mask)
    mask_data = mask_img.get_fdata()

    ball = gaussian_models.G1Ball()  #CSF
    stick = cylinder_models.C1Stick()  #Intra-axonal diffusion
    zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])
    else:
        dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                            'C1Stick_1_lambda_par',
                                            'partial_volume_0')
    dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                         'C1Stick_1_lambda_par')
    dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                         parallel_diffusivity)

    NODDI_mod = MultiCompartmentModel(models=[ball, dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', iso_diffusivity)
    NODDI_fit = NODDI_mod.fit(acq_scheme,
                              data,
                              mask=mask_data,
                              number_of_processors=nthreads,
                              solver=solver)

    fitted_parameters = NODDI_fit.fitted_parameters

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD2BinghamDistributed_1_SD2Bingham_1_odi']

    else:
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']

    if bids_fmt:
        output_odi = output_dir + '/' + bids_id + '_model-NODDI_parameter-ODI.nii.gz'
        output_vf_intra = output_dir + '/' + bids_id + '_model-NODDI_parameter-ICVF.nii.gz'
        output_vf_extra = output_dir + '/' + bids_id + '_model-NODDI_parameter-EXVF.nii.gz'
        output_vf_iso = output_dir + '/' + bids_id + '_model-NODDI_parameter-ISO.nii.gz'
    else:
        output_odi = output_dir + '/noddi_ODI.nii.gz'
        output_vf_intra = output_dir + '/noddi_ICVF.nii.gz'
        output_vf_extra = output_dir + '/noddi_EXVF.nii.gz'
        output_vf_iso = output_dir + '/noddi_ISO.nii.gz'

    #Save the images with the DWI affine and header
    odi_img = nib.Nifti1Image(odi, dwi_img.affine, dwi_img.header)
    odi_img.set_sform(dwi_img.get_sform())
    odi_img.set_qform(dwi_img.get_qform())
    nib.save(odi_img, output_odi)

    icvf_img = nib.Nifti1Image(vf_intra, dwi_img.affine, dwi_img.header)
    icvf_img.set_sform(dwi_img.get_sform())
    icvf_img.set_qform(dwi_img.get_qform())
    nib.save(icvf_img, output_vf_intra)

    ecvf_img = nib.Nifti1Image(vf_extra, dwi_img.affine, dwi_img.header)
    ecvf_img.set_sform(dwi_img.get_sform())
    ecvf_img.set_qform(dwi_img.get_qform())
    nib.save(ecvf_img, output_vf_extra)

    iso_img = nib.Nifti1Image(vf_iso, dwi_img.affine, dwi_img.header)
    iso_img.set_sform(dwi_img.get_sform())
    iso_img.set_qform(dwi_img.get_qform())
    nib.save(iso_img, output_vf_iso)
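
A hypothetical invocation of fit_noddi_dmipy (the file paths below are placeholder assumptions, not from the original source):

fit_noddi_dmipy(input_dwi='sub-01_dwi.nii.gz',
                input_bval='sub-01_dwi.bval',
                input_bvec='sub-01_dwi.bvec',
                input_mask='sub-01_brainmask.nii.gz',
                output_dir='noddi_out',
                nthreads=4,
                model_type='WATSON')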

Example #6
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    #subj_ID_List = ['125525', '118225', '116726']
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info  # property; prints the scheme summary when accessed

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### NODDI Watson ####
        ball = gaussian_models.G1Ball()
        stick = cylinder_models.C1Stick()
        zeppelin = gaussian_models.G2Zeppelin()

        watson_dispersed_bundle = SD1WatsonDistributed(
            models=[stick, zeppelin])

        watson_dispersed_bundle.set_tortuous_parameter(
            'G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
            'partial_volume_0')
        watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                    'C1Stick_1_lambda_par')
        watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                    1.7e-9)

        NODDI_mod = MultiCompartmentModel(
            models=[ball, watson_dispersed_bundle])
        NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

        print('Fitting the NODDI Model ...')
        fit_start_time = time.time()
        NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                      subj_data,
                                      mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = NODDI_fit_hcp.fitted_parameters

        para_Names_list = list(fitted_parameters.keys())

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None

Example #7
    bvalues_SI = bvalues * 1e6 
    acq_scheme = acquisition_scheme_from_bvalues(bvalues_SI, gradient_directions_normalized, delta, Delta)
    # gtab_dipy = gradient_table(bvalues, gradient_directions, big_delta=Delta, small_delta=delta, atol=3e-2)
    # acq_scheme = gtab_dipy2mipy(gtab_dipy)

    acq_scheme.print_acquisition_info
    
    dwi_nii = nib.load(allDwiNames[iMask])
    dwi = dwi_nii.get_fdata()
    mask = nib.load(allMaskNames[iMask]).get_fdata()

    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle1 = SD1WatsonDistributed(models=[stick, zeppelin])

    print(watson_dispersed_bundle1.parameter_names)

    watson_dispersed_bundle1.set_tortuous_parameter('G2Zeppelin_1_lambda_perp','C1Stick_1_lambda_par','partial_volume_0')
    watson_dispersed_bundle1.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
    watson_dispersed_bundle1.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)

    watson_dispersed_bundle2 = watson_dispersed_bundle1.copy()
    NODDIx_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle1, watson_dispersed_bundle2])

    print(NODDIx_mod.parameter_names)

    NODDIx_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    NODDIx_fit = NODDIx_mod.fit(acq_scheme, dwi, mask=mask, solver='mix', maxiter=300)
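
The snippet ends after fitting. A minimal saving sketch in the style of the other examples on this page (an assumption, not part of the original source):

    for name, volume in NODDIx_fit.fitted_parameters.items():
        # Reuse the DWI affine so parameter maps stay aligned with the input data
        nib.save(nib.Nifti1Image(volume, dwi_nii.affine), name + '.nii.gz')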

Example #8
    def _build_watson_dispersed_model(self):
        watson_dispersed_bundle = SD1WatsonDistributed(
            models=[self.stick, self.zeppelin])

        return self._set_watson_parameters(watson_dispersed_bundle)
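
The helper _set_watson_parameters is not shown. A plausible sketch based on the constraints applied in the NODDI Watson examples above (an assumption, not the original implementation):

    def _set_watson_parameters(self, bundle):
        # Assumed: the same tortuosity/equality/fixed-value constraints used above
        bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                      'C1Stick_1_lambda_par',
                                      'partial_volume_0')
        bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                   'C1Stick_1_lambda_par')
        bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)
        return bundle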