Example No. 1
def create_noddi_watson_model(lambda_iso_diff=3.e-9, lambda_par_diff=1.7e-9):
    """Creates NODDI mulit-compartment model with Watson distribution."""
    """
        Arguments:
            lambda_iso_diff: float
                isotropic diffusivity
            lambda_par_diff: float
                parallel diffusivity
        Returns: MultiCompartmentModel instance
            NODDI Watson multi-compartment model instance
    """
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()
    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])
    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                   'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                lambda_par_diff)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', lambda_iso_diff)

    return NODDI_mod
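
A minimal usage sketch of the helper above (here `scheme` and `data` are placeholder names for a dmipy acquisition scheme and a measured signal array):

    noddi = create_noddi_watson_model()  # defaults: iso 3e-9, par 1.7e-9 m^2/s
    noddi_fit = noddi.fit(scheme, data)
    odi = noddi_fit.fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']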
Example No. 2
def test_acq_scheme_without_deltas_model_catch():
    scheme = wu_minn_hcp_acquisition_scheme()
    test_data = np.random.rand(len(scheme.bvalues))
    scheme_clinical = acquisition_scheme_from_bvalues(
        scheme.bvalues, scheme.gradient_directions)
    mc_model = MultiCompartmentModel([C4CylinderGaussianPhaseApproximation()])
    assert_raises(ValueError, mc_model.fit, scheme_clinical, test_data)
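
For contrast, a minimal sketch of building the same scheme with pulse timings so that the cylinder model accepts it; the delta/Delta values are illustrative assumptions, not values from the test:

    scheme_with_timings = acquisition_scheme_from_bvalues(
        scheme.bvalues, scheme.gradient_directions,
        delta=0.0106,  # pulse duration in seconds (assumed)
        Delta=0.0431)  # pulse separation in seconds (assumed)
    mc_model.fit(scheme_with_timings, test_data)  # no ValueError is raised
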
def main():

    # Base path of all given files for the 'All Models Are Wrong' challenge
    base_path = r'/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/'
    base_path = os.path.normpath(base_path)

    # Just dealing with PGSE for now
    pgse_acq_params_path = os.path.join(base_path, 'PGSE_AcqParams.txt')
    pgse_signal_path = os.path.join(base_path, 'PGSE_Simulations.txt')

    # Read files via Numpy
    pgse_acq_params = np.loadtxt(pgse_acq_params_path)
    pgse_signal_data = np.loadtxt(pgse_signal_path)
    pgse_example_sub_diff = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/DIffusivity-ExampleSubmission3/PGSE.txt'
    )
    pgse_example_sub_volfrac = np.loadtxt(
        '/nfs/masi/nathv/memento_2020/all_models_are_wrong/files_project_2927_session_1436090/2-AllModelsAreWrong-ExampleSubmissions/VolumeFraction-ExampleSubmission3/PGSE.txt'
    )

    # Transpose the Signal data
    pgse_signal_data = pgse_signal_data.transpose()

    # Dissect the acquisition parameters to form the Acquisition Table
    bvecs = pgse_acq_params[:, 1:4]
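    # convert b-values from s/mm^2 to SI units (s/m^2), as dmipy expects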
    bvals = pgse_acq_params[:, 6] * 1e6
    grad_str = pgse_acq_params[:, 0]
    small_del = pgse_acq_params[:, 4]
    big_del = pgse_acq_params[:, 5]

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(bvals,
                                                      bvecs,
                                                      delta=small_del,
                                                      Delta=big_del)

    subj_Acq_Scheme.print_acquisition_info

    #### NODDI Bingham ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    bingham_dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])

    bingham_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                    'C1Stick_1_lambda_par',
                                                    'partial_volume_0')
    bingham_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                 'C1Stick_1_lambda_par')
    bingham_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                 1.7e-9)

    NODDI_bingham_mod = MultiCompartmentModel(
        models=[ball, bingham_dispersed_bundle])
    NODDI_bingham_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_bingham_mod.fit(subj_Acq_Scheme,
                                          pgse_signal_data,
                                          use_parallel_processing=True,
                                          number_of_processors=8)

    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(
        fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    sub_1_pv0 = NODDI_fit_hcp.fitted_parameters['partial_volume_0']
    sub_2_pv1 = NODDI_fit_hcp.fitted_parameters['partial_volume_1']

    np.savetxt('noddi_bingham_pv0.txt', sub_1_pv0)
    np.savetxt('noddi_bingham_pv1.txt', sub_2_pv1)

    print('Debug here')

    return None
Example No. 4
def main():

    # Define Base Data Paths here
    base_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/SingleVoxelSignals/SingleVoxelSignals'
    base_data_path = os.path.normpath(base_data_path)

    # Define Saving Paths here
    save_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/results/intra_extra'
    save_data_path = os.path.normpath(save_data_path)
    if not os.path.exists(save_data_path):
        os.mkdir(save_data_path)

    # Scheme and The Directions file Paths
    scheme_path = os.path.join(base_data_path, 'scheme.scheme')
    bvecs_path = os.path.join(base_data_path, 'BVECS.bvec')
    bvals_path = os.path.join(base_data_path, 'BVALS.bval')

    # Voxel Paths
    voxel_fc_path = os.path.join(base_data_path, 'FasciulusCuneatus.txt')
    voxel_lc_path = os.path.join(base_data_path, 'LateralCST.txt')
    voxel_sl_path = os.path.join(base_data_path, 'SpinalLemniscus.txt')
    voxel_vc_path = os.path.join(base_data_path, 'VentralCST.txt')
    voxel_vh_path = os.path.join(base_data_path, 'VentralHorn.txt')

    # Reading the Scheme and the Directions
    scheme_data = np.loadtxt(scheme_path)
    bvecs_data = np.loadtxt(bvecs_path)
    bvals_data = np.loadtxt(bvals_path)

    # Read the voxel Data
    fc_data = []
    with open(voxel_fc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            fc_data.append(row)
    fc_data = np.asarray(fc_data, dtype='float32')
    print('FC Voxel Shape: {}'.format(fc_data.shape))

    lc_data = []
    with open(voxel_lc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            lc_data.append(row)
    lc_data = np.asarray(lc_data, dtype='float32')
    print('LC Voxel Shape: {}'.format(lc_data.shape))

    sl_data = []
    with open(voxel_sl_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            sl_data.append(row)
    sl_data = np.asarray(sl_data, dtype='float32')
    print('SL Voxel Shape: {}'.format(sl_data.shape))

    vc_data = []
    with open(voxel_vc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            vc_data.append(row)
    vc_data = np.asarray(vc_data, dtype='float32')
    print('VC Voxel Shape: {}'.format(vc_data.shape))

    vh_data = []
    with open(voxel_vh_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            vh_data.append(row)
    vh_data = np.asarray(vh_data, dtype='float32')
    print('VH Voxel Shape: {}'.format(vh_data.shape))

    print('All Data Loaded ...')

    print('Constructing Acquisition Schemes')
    all_bvals = bvals_data * 1e6
    all_bvecs = np.transpose(bvecs_data)

    little_delta = scheme_data[:, 0]
    big_delta = scheme_data[:, 1]
    t_e = scheme_data[:, 4]

    Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                 all_bvecs,
                                                 delta=little_delta * 1e-3,
                                                 Delta=big_delta * 1e-3,
                                                 TE=t_e * 1e-3)

    cylinder_dict = {
        'C1': cyn.C1Stick,
        'C2': cyn.C2CylinderStejskalTannerApproximation,
        'C3': cyn.C3CylinderCallaghanApproximation,
        'C4': cyn.C4CylinderGaussianPhaseApproximation
    }

    gaussian_dict = {
        'G1': gsn.G1Ball,
        'G2': gsn.G2Zeppelin,
        'G3': gsn.G3TemporalZeppelin
    }

    # FC Saving path
    fc_save_path = os.path.join(save_data_path, 'FC')
    if not os.path.exists(fc_save_path):
        os.mkdir(fc_save_path)

    lc_save_path = os.path.join(save_data_path, 'LC')
    if not os.path.exists(lc_save_path):
        os.mkdir(lc_save_path)

    sl_save_path = os.path.join(save_data_path, 'SL')
    if not os.path.exists(sl_save_path):
        os.mkdir(sl_save_path)

    vc_save_path = os.path.join(save_data_path, 'VC')
    if not os.path.exists(vc_save_path):
        os.mkdir(vc_save_path)

    vh_save_path = os.path.join(save_data_path, 'VH')
    if not os.path.exists(vh_save_path):
        os.mkdir(vh_save_path)

    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():

            cylinder = cyn_val()
            gaussian = gsn_val()

            multi_compat_model = MultiCompartmentModel(
                models=[cylinder, gaussian])

            ######## FC #########
            fc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                fc_data,
                use_parallel_processing=False,
                solver='mix')
            fc_fitted_params = fc_model_fit.fitted_parameters

            ## Error Calculations
            fc_mse = fc_model_fit.mean_squared_error(fc_data)
            fc_R2 = fc_model_fit.R2_coefficient_of_determination(fc_data)
            ##

            new_params = {}
            new_params['mse'] = fc_mse.tolist()
            new_params['R2'] = fc_R2.tolist()

            for key, value in fc_fitted_params.items():
                new_params[key] = value.tolist()

            model_file_name = cyn_key + '_' + gsn_key + '.json'
            model_save_path = os.path.join(fc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            #####################

            ######## LC #########
            lc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                lc_data,
                use_parallel_processing=False,
                solver='mix')
            lc_fitted_params = lc_model_fit.fitted_parameters

            ## Error Calculations
            lc_mse = lc_model_fit.mean_squared_error(lc_data)
            lc_R2 = lc_model_fit.R2_coefficient_of_determination(lc_data)
            ##

            new_params = {}
            new_params['mse'] = lc_mse.tolist()
            new_params['R2'] = lc_R2.tolist()

            for key, value in lc_fitted_params.items():
                new_params[key] = value.tolist()

            model_file_name = cyn_key + '_' + gsn_key + '.json'
            model_save_path = os.path.join(lc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## SL #########
            sl_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                sl_data,
                use_parallel_processing=False,
                solver='mix')
            sl_fitted_params = sl_model_fit.fitted_parameters

            ## Error Calculations
            sl_mse = sl_model_fit.mean_squared_error(sl_data)
            sl_R2 = sl_model_fit.R2_coefficient_of_determination(sl_data)
            ##

            new_params = {}
            new_params['mse'] = sl_mse.tolist()
            new_params['R2'] = sl_R2.tolist()

            for key, value in sl_fitted_params.items():
                new_params[key] = value.tolist()

            model_file_name = cyn_key + '_' + gsn_key + '.json'
            model_save_path = os.path.join(sl_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## VC #########
            vc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                vc_data,
                use_parallel_processing=False,
                solver='mix')
            vc_fitted_params = vc_model_fit.fitted_parameters

            ## Error Calculations
            vc_mse = vc_model_fit.mean_squared_error(vc_data)
            vc_R2 = vc_model_fit.R2_coefficient_of_determination(vc_data)
            ##

            new_params = {}
            new_params['mse'] = vc_mse.tolist()
            new_params['R2'] = vc_R2.tolist()

            for key, value in vc_fitted_params.items():
                new_params[key] = value.tolist()

            model_file_name = cyn_key + '_' + gsn_key + '.json'
            model_save_path = os.path.join(vc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## VH #########
            vh_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                vh_data,
                use_parallel_processing=False,
                solver='mix')
            vh_fitted_params = vh_model_fit.fitted_parameters

            ## Error Calculations
            vh_mse = vh_model_fit.mean_squared_error(vh_data)
            vh_R2 = vh_model_fit.R2_coefficient_of_determination(vh_data)
            ##

            new_params = {}
            new_params['mse'] = vh_mse.tolist()
            new_params['R2'] = vh_R2.tolist()

            for key, value in vh_fitted_params.items():
                new_params[key] = value.tolist()

            model_file_name = cyn_key + '_' + gsn_key + '.json'
            model_save_path = os.path.join(vh_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            print('Model Completed with Combination of {} and {}'.format(
                cyn_key, gsn_key))

    print('All Done')

    return None
def main():

    # Define Base Data Paths here
    base_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/SingleVoxelSignals_norm/SingleVoxelSignals_norm'
    base_data_path = os.path.normpath(base_data_path)

    # Define Saving Paths here
    save_data_path = r'/nfs/masi/nathv/spinal_cord_data_2020/results_norm/intra_extra_rest'
    save_data_path = os.path.normpath(save_data_path)
    if not os.path.exists(save_data_path):
        os.mkdir(save_data_path)

    # Scheme and The Directions file Paths
    scheme_path = os.path.join(base_data_path, 'scheme.scheme')
    bvecs_path = os.path.join(base_data_path, 'BVECS.bvec')
    bvals_path = os.path.join(base_data_path, 'BVALS.bval')

    # Voxel Paths
    voxel_fc_path = os.path.join(base_data_path, 'FasciulusCuneatus.txt')
    voxel_lc_path = os.path.join(base_data_path, 'LateralCST.txt')
    voxel_sl_path = os.path.join(base_data_path, 'SpinalLemniscus.txt')
    voxel_vc_path = os.path.join(base_data_path, 'VentralCST.txt')
    voxel_vh_path = os.path.join(base_data_path, 'VentralHorn.txt')

    # Reading the Scheme and the Directions
    scheme_data = np.loadtxt(scheme_path)
    bvecs_data = np.loadtxt(bvecs_path)
    bvals_data = np.loadtxt(bvals_path)

    # Read the voxel Data
    fc_data = []
    with open(voxel_fc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            fc_data.append(row)
    fc_data = np.asarray(fc_data, dtype='float32')
    print('FC Voxel Shape: {}'.format(fc_data.shape))

    lc_data = []
    with open(voxel_lc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            lc_data.append(row)
    lc_data = np.asarray(lc_data, dtype='float32')
    print('LC Voxel Shape: {}'.format(lc_data.shape))

    sl_data = []
    with open(voxel_sl_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            sl_data.append(row)
    sl_data = np.asarray(sl_data, dtype='float32')
    print('SL Voxel Shape: {}'.format(sl_data.shape))

    vc_data = []
    with open(voxel_vc_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            vc_data.append(row)
    vc_data = np.asarray(vc_data, dtype='float32')
    print('VC Voxel Shape: {}'.format(vc_data.shape))

    vh_data = []
    with open(voxel_vh_path) as csvfile:
        readcsv = csv.reader(csvfile, delimiter=',')
        for row in readcsv:
            vh_data.append(row)
    vh_data = np.asarray(vh_data, dtype='float32')
    print('VH Voxel Shape: {}'.format(vh_data.shape))

    print('All Data Loaded ...')

    print('Constructing Acquisition Schemes')
    all_bvals = bvals_data * 1e6
    all_bvecs = np.transpose(bvecs_data)

    little_delta = scheme_data[:, 0]
    big_delta = scheme_data[:, 1]
    #t_e = scheme_data[:, 4]

    Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                 all_bvecs,
                                                 delta=little_delta * 1e-3,
                                                 Delta=big_delta * 1e-3)

    Acq_Scheme.print_acquisition_info

    cylinder_dict = {
        'C1': cyn.C1Stick,
        'C2': cyn.C2CylinderStejskalTannerApproximation,
        'C3': cyn.C3CylinderCallaghanApproximation,
        'C4': cyn.C4CylinderGaussianPhaseApproximation
    }

    gaussian_dict = {'G1': gsn.G1Ball, 'G2': gsn.G2Zeppelin}

    sphere_dict = {
        'S1': sph.S1Dot,
        'S2': sph.S2SphereStejskalTannerApproximation,
        'S4': sph.S4SphereGaussianPhaseApproximation
    }

    # FC Saving path
    fc_save_path = os.path.join(save_data_path, 'FC')
    if not os.path.exists(fc_save_path):
        os.mkdir(fc_save_path)

    lc_save_path = os.path.join(save_data_path, 'LC')
    if not os.path.exists(lc_save_path):
        os.mkdir(lc_save_path)

    sl_save_path = os.path.join(save_data_path, 'SL')
    if not os.path.exists(sl_save_path):
        os.mkdir(sl_save_path)

    vc_save_path = os.path.join(save_data_path, 'VC')
    if not os.path.exists(vc_save_path):
        os.mkdir(vc_save_path)

    vh_save_path = os.path.join(save_data_path, 'VH')
    if not os.path.exists(vh_save_path):
        os.mkdir(vh_save_path)

    # TODO Double combinations of intra- and extra-axonal compartments.
    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():

            # File name
            model_file_name = cyn_key + '_' + gsn_key + '.json'
            signal_file_name = cyn_key + '_' + gsn_key + '_signal.txt'

            cylinder = cyn_val()
            gaussian = gsn_val()

            multi_compat_model = MultiCompartmentModel(
                models=[cylinder, gaussian])

            # If more than one 'mu' (orientation) parameter exists, the model
            # has multiple orientation estimates; identify them and constrain
            # them to be equal to each other.
            mu_list = []
            for each_para_name in multi_compat_model.parameter_names:
                # Last two characters of the parameter name
                mu_type = each_para_name[-2:]
                if mu_type == 'mu':
                    mu_list.append(each_para_name)
                    #multi_compat_model.set_fixed_parameter(each_para_name, 1.7e-9)

            if len(mu_list) == 2:
                multi_compat_model.set_equal_parameter(mu_list[0], mu_list[1])
            # End of mu conditions
            print(multi_compat_model.parameter_names)

            ######## FC #########
            fc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                fc_data,
                use_parallel_processing=False,
                solver='mix')
            fc_fitted_params = fc_model_fit.fitted_parameters
            fc_model_signal = fc_model_fit.predict()

            ## Save FC Signal
            fc_model_signal = fc_model_signal[0, :].tolist()
            signal_save_path = os.path.join(fc_save_path, signal_file_name)
            np.savetxt(signal_save_path, fc_model_signal)
            #################

            ## Error Calculations
            fc_mse = fc_model_fit.mean_squared_error(fc_data)
            ##

            new_params = {}
            new_params['mse'] = fc_mse.tolist()

            for key, value in fc_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(fc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            #####################

            ######## LC #########
            lc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                lc_data,
                use_parallel_processing=False,
                solver='mix')
            lc_fitted_params = lc_model_fit.fitted_parameters
            lc_model_signal = lc_model_fit.predict()

            ## Save LC Signal
            lc_model_signal = lc_model_signal[0, :].tolist()
            signal_save_path = os.path.join(lc_save_path, signal_file_name)
            np.savetxt(signal_save_path, lc_model_signal)
            #################

            ## Error Calculations
            lc_mse = lc_model_fit.mean_squared_error(lc_data)
            ##

            new_params = {}
            new_params['mse'] = lc_mse.tolist()

            for key, value in lc_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(lc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            #####################

            ######## SL #########
            sl_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                sl_data,
                use_parallel_processing=False,
                solver='mix')
            sl_fitted_params = sl_model_fit.fitted_parameters
            sl_model_signal = sl_model_fit.predict()

            ## Save SL Signal
            sl_model_signal = sl_model_signal[0, :].tolist()
            signal_save_path = os.path.join(sl_save_path, signal_file_name)
            np.savetxt(signal_save_path, sl_model_signal)
            #################

            ## Error Calculations
            sl_mse = sl_model_fit.mean_squared_error(sl_data)
            ##

            new_params = {}
            new_params['mse'] = sl_mse.tolist()

            for key, value in sl_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(sl_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## VC ##########
            vc_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                vc_data,
                use_parallel_processing=False,
                solver='mix')
            vc_fitted_params = vc_model_fit.fitted_parameters
            vc_model_signal = vc_model_fit.predict()

            ## Save VC Signal
            vc_model_signal = vc_model_signal[0, :].tolist()
            signal_save_path = os.path.join(vc_save_path, signal_file_name)
            np.savetxt(signal_save_path, vc_model_signal)
            #################

            ## Error Calculations
            vc_mse = vc_model_fit.mean_squared_error(vc_data)
            ##

            new_params = {}
            new_params['mse'] = vc_mse.tolist()

            for key, value in vc_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(vc_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            ######## VH #########
            vh_model_fit = multi_compat_model.fit(
                Acq_Scheme,
                vh_data,
                use_parallel_processing=False,
                solver='mix')
            vh_fitted_params = vh_model_fit.fitted_parameters
            vh_model_signal = vh_model_fit.predict()

            ## Save VH Signal
            vh_model_signal = vh_model_signal[0, :].tolist()
            signal_save_path = os.path.join(vh_save_path, signal_file_name)
            np.savetxt(signal_save_path, vh_model_signal)
            #################

            ## Error Calculations
            vh_mse = vh_model_fit.mean_squared_error(vh_data)
            ##

            new_params = {}
            new_params['mse'] = vh_mse.tolist()

            for key, value in vh_fitted_params.items():
                new_params[key] = value.tolist()

            model_save_path = os.path.join(vh_save_path, model_file_name)

            with open(model_save_path, 'w') as json_file:
                json.dump(new_params, json_file)
            ######################

            print('Model Completed with Combination of {} and {}'.format(
                cyn_key, gsn_key))

    # TODO Triple combinations of intra-axonal, extra-axonal and restricted compartments.
    for cyn_key, cyn_val in cylinder_dict.items():
        for gsn_key, gsn_val in gaussian_dict.items():
            for sph_key, sph_val in sphere_dict.items():

                cylinder = cyn_val()
                gaussian = gsn_val()
                sphere = sph_val()

                multi_compat_model = MultiCompartmentModel(
                    models=[cylinder, gaussian, sphere])
                print(multi_compat_model.parameter_names)

                # If more than one 'mu' (orientation) parameter exists, the
                # model has multiple orientation estimates; identify them and
                # constrain them to be equal to each other.
                mu_list = []
                for each_para_name in multi_compat_model.parameter_names:
                    # Last two characters of the parameter name
                    mu_type = each_para_name[-2:]
                    if mu_type == 'mu':
                        mu_list.append(each_para_name)
                        # multi_compat_model.set_fixed_parameter(each_para_name, 1.7e-9)

                if len(mu_list) == 2:
                    multi_compat_model.set_equal_parameter(
                        mu_list[0], mu_list[1])
                # End of mu conditions

                # This file name is common to all voxels and describes the nomenclature
                # as the selection of models that were used based on the three components
                model_file_name = cyn_key + '_' + gsn_key + '_' + sph_key + '.json'
                signal_file_name = cyn_key + '_' + gsn_key + '_' + sph_key + '_signal.txt'

                ######## FC #########
                fc_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    fc_data,
                    use_parallel_processing=False,
                    solver='mix')
                fc_fitted_params = fc_model_fit.fitted_parameters
                fc_model_signal = fc_model_fit.predict()

                ## Save FC Signal
                fc_model_signal = fc_model_signal[0, :].tolist()
                signal_save_path = os.path.join(fc_save_path, signal_file_name)
                np.savetxt(signal_save_path, fc_model_signal)
                #################

                ## Error Calculations
                fc_mse = fc_model_fit.mean_squared_error(fc_data)
                ##

                new_params = {}
                new_params['mse'] = fc_mse.tolist()

                for key, value in fc_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(fc_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                #####################

                ######## LC #########
                lc_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    lc_data,
                    use_parallel_processing=False,
                    solver='mix')
                lc_fitted_params = lc_model_fit.fitted_parameters
                lc_model_signal = lc_model_fit.predict()

                ## Save LC Signal
                lc_model_signal = lc_model_signal[0, :].tolist()
                signal_save_path = os.path.join(lc_save_path, signal_file_name)
                np.savetxt(signal_save_path, lc_model_signal)
                #################

                ## Error Calculations
                lc_mse = lc_model_fit.mean_squared_error(lc_data)
                ##

                new_params = {}
                new_params['mse'] = lc_mse.tolist()

                for key, value in lc_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(lc_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                #####################

                ######## SL #########
                sl_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    sl_data,
                    use_parallel_processing=False,
                    solver='mix')
                sl_fitted_params = sl_model_fit.fitted_parameters
                sl_model_signal = sl_model_fit.predict()

                ## Save SL Signal
                sl_model_signal = sl_model_signal[0, :].tolist()
                signal_save_path = os.path.join(sl_save_path, signal_file_name)
                np.savetxt(signal_save_path, sl_model_signal)
                #################

                ## Error Calculations
                sl_mse = sl_model_fit.mean_squared_error(sl_data)
                ##

                new_params = {}
                new_params['mse'] = sl_mse.tolist()

                for key, value in sl_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(sl_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                ######################

                ######## VC ##########
                vc_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    vc_data,
                    use_parallel_processing=False,
                    solver='mix')
                vc_fitted_params = vc_model_fit.fitted_parameters
                vc_model_signal = vc_model_fit.predict()

                ## Save VC Signal
                vc_model_signal = vc_model_signal[0, :].tolist()
                signal_save_path = os.path.join(vc_save_path, signal_file_name)
                np.savetxt(signal_save_path, vc_model_signal)
                #################

                ## Error Calculations
                vc_mse = vc_model_fit.mean_squared_error(vc_data)
                ##

                new_params = {}
                new_params['mse'] = vc_mse.tolist()

                for key, value in vc_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(vc_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                ######################

                ######## VH #########
                vh_model_fit = multi_compat_model.fit(
                    Acq_Scheme,
                    vh_data,
                    use_parallel_processing=False,
                    solver='mix')
                vh_fitted_params = vh_model_fit.fitted_parameters
                vh_model_signal = vh_model_fit.predict()

                ## Save VH Signal
                vh_model_signal = vh_model_signal[0, :].tolist()
                signal_save_path = os.path.join(vh_save_path, signal_file_name)
                np.savetxt(signal_save_path, vh_model_signal)
                #################

                ## Error Calculations
                vh_mse = vh_model_fit.mean_squared_error(vh_data)
                ##

                new_params = {}
                new_params['mse'] = vh_mse.tolist()

                for key, value in vh_fitted_params.items():
                    new_params[key] = value.tolist()

                model_save_path = os.path.join(vh_save_path, model_file_name)

                with open(model_save_path, 'w') as json_file:
                    json.dump(new_params, json_file)
                ######################

                print('Model Completed with Combination of {} and {} and {}'.
                      format(cyn_key, gsn_key, sph_key))

    print('All Done')
def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time - data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### NODDI Watson ####
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    print('Fitting the NODDI Model ...')
    fit_start_time = time.time()
    NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                  subj_data,
                                  mask=subj_data[..., 0] > 0,
                                  use_parallel_processing=True,
                                  number_of_processors=32)
    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = NODDI_fit_hcp.fitted_parameters

    para_Names_list = []
    for key, value in fitted_parameters.items():
        para_Names_list.append(key)

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from already Read Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter], affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
Example No. 7
def main():
    #Argparse Stuff
    parser = argparse.ArgumentParser(description='subject_id')
    parser.add_argument('--subject_id', type=str, default='135124')
    args = parser.parse_args()

    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    # TODO KARTHIK
    base_save_path = r'/root/hcp_results'
    base_save_path = os.path.normpath(base_save_path)
    if not os.path.exists(base_save_path):
        os.mkdir(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'VERDICT'

    # Base HCP Data Path
    # TODO KARTHIK This is where we hard set HCP's Data Path
    base_data_path = r'/root/local_mount/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID
    subj_ID = args.subject_id

    # Subject Save Path
    subj_save_path = os.path.join(base_save_path, subj_ID)
    if not os.path.exists(subj_save_path):
        os.mkdir(subj_save_path)

    # TODO For later the subject data, bval and bvec reading part can be put inside a function
    subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w', 'Diffusion')

    # Read the Nifti file, bvals and bvecs
    subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
    subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

    all_bvals = subj_bvals * 1e6
    all_bvecs = np.transpose(subj_bvecs)

    subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                      all_bvecs,
                                                      delta=10.6 * 1e-3,
                                                      Delta=43.1 * 1e-3,
                                                      TE=89.5 * 1e-3)

    subj_Acq_Scheme.print_acquisition_info

    print('Loading the Nifti Data ...')
    data_start_time = time.time()

    subj_babel_object = nib.load(os.path.join(subj_data_path, 'data.nii.gz'))
    subj_data = subj_babel_object.get_fdata()
    #axial_slice_data = subj_data[55:60, 65:70, 60:62, :]

    mask_babel_object = nib.load(
        os.path.join(subj_data_path, 'nodif_brain_mask.nii.gz'))
    mask_data = mask_babel_object.get_fdata()

    data_end_time = time.time()
    data_time = int(np.round(data_end_time - data_start_time))

    print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                  data_start_time))
    print('The Data Dimensions are: {}'.format(subj_data.shape))

    #### Verdict Begin ####
    sphere = sphere_models.S4SphereGaussianPhaseApproximation(
        diffusion_constant=0.9e-9)
    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()

    verdict_mod = MultiCompartmentModel(models=[sphere, ball, stick])

    verdict_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 0.9e-9)
    verdict_mod.set_parameter_optimization_bounds('C1Stick_1_lambda_par',
                                                  [3.05e-9, 10e-9])

    print('Fitting the Verdict Model ...')
    fit_start_time = time.time()
    mcdmi_fit = verdict_mod.fit(subj_Acq_Scheme,
                                subj_data,
                                mask=mask_data,
                                solver='mix',
                                use_parallel_processing=True,
                                number_of_processors=64)

    fit_end_time = time.time()
    print('Model Fitting Completed ... Time Taken to fit: {}'.format(
        fit_end_time - fit_start_time))
    fit_time = int(np.round(fit_end_time - fit_start_time))

    fitted_parameters = mcdmi_fit.fitted_parameters

    # Get List of Estimated Parameter Names
    para_Names_list = []
    for key, value in fitted_parameters.items():
        para_Names_list.append(key)

    ### Nifti Saving Part
    # Create a directory per subject
    subj_method_save_path = os.path.join(subj_save_path, method_name)
    if not os.path.exists(subj_method_save_path):
        os.mkdir(subj_method_save_path)

    # Retrieve the affine from already Read Nifti file to form the header
    affine = subj_babel_object.affine

    # Loop over fitted parameters name list
    for each_fitted_parameter in para_Names_list:
        new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                  affine)

        # Form the file path
        f_name = each_fitted_parameter + '.nii.gz'
        param_file_path = os.path.join(subj_method_save_path, f_name)

        nib.save(new_img, param_file_path)

    return None
Example No. 8
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'NODDI_WATSON'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    #subj_ID_List = ['125525', '118225', '116726']
    subj_ID_List = ['115017', '114823', '116726', '118225']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals, all_bvecs)
        subj_Acq_Scheme.print_acquisition_info

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[50:65, 50:65, 60:62, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### NODDI Watson ####
        ball = gaussian_models.G1Ball()
        stick = cylinder_models.C1Stick()
        zeppelin = gaussian_models.G2Zeppelin()

        watson_dispersed_bundle = SD1WatsonDistributed(
            models=[stick, zeppelin])

        watson_dispersed_bundle.set_tortuous_parameter(
            'G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par',
            'partial_volume_0')
        watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                    'C1Stick_1_lambda_par')
        watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                    1.7e-9)

        NODDI_mod = MultiCompartmentModel(
            models=[ball, watson_dispersed_bundle])
        NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

        print('Fitting the NODDI Model ...')
        fit_start_time = time.time()
        NODDI_fit_hcp = NODDI_mod.fit(subj_Acq_Scheme,
                                      subj_data,
                                      mask=subj_data[..., 0] > 0)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = NODDI_fit_hcp.fitted_parameters

        para_Names_list = []
        for key, value in fitted_parameters.items():
            para_Names_list.append(key)

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
def fit_noddi_dmipy(input_dwi,
                    input_bval,
                    input_bvec,
                    input_mask,
                    output_dir,
                    nthreads=1,
                    solver='brute2fine',
                    model_type='WATSON',
                    parallel_diffusivity=1.7e-9,
                    iso_diffusivity=3e-9,
                    bids_fmt=False,
                    bids_id=''):

    import nibabel as nib
    from dmipy.signal_models import cylinder_models, gaussian_models
    from dmipy.distributions.distribute_models import SD1WatsonDistributed, SD2BinghamDistributed
    from dmipy.core.modeling_framework import MultiCompartmentModel
    from dmipy.core import modeling_framework
    from dmipy.core.acquisition_scheme import acquisition_scheme_from_bvalues
    from dipy.io import read_bvals_bvecs

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    #Setup the acquisition scheme
    bvals, bvecs = read_bvals_bvecs(input_bval, input_bvec)
    bvals_SI = bvals * 1e6
    acq_scheme = acquisition_scheme_from_bvalues(bvals_SI, bvecs)
    acq_scheme.print_acquisition_info

    #Load the data
    img = nib.load(input_dwi)
    data = img.get_fdata()

    #Load the mask (separate variable, so the DWI image header is not shadowed)
    mask_img = nib.load(input_mask)
    mask_data = mask_img.get_fdata()

    ball = gaussian_models.G1Ball()  #CSF
    stick = cylinder_models.C1Stick()  #Intra-axonal diffusion
    zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])
    else:
        dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                            'C1Stick_1_lambda_par',
                                            'partial_volume_0')
    dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                         'C1Stick_1_lambda_par')
    dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                         parallel_diffusivity)

    NODDI_mod = MultiCompartmentModel(models=[ball, dispersed_bundle])
    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', iso_diffusivity)
    NODDI_fit = NODDI_mod.fit(acq_scheme,
                              data,
                              mask=mask_data,
                              number_of_processors=nthreads,
                              solver=solver)

    fitted_parameters = NODDI_fit.fitted_parameters

    if model_type == 'Bingham' or model_type == 'BINGHAM':
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD2BinghamDistributed_1_SD2Bingham_1_odi']

    else:
        # get total Stick signal contribution
        vf_intra = (
            fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'] *
            fitted_parameters['partial_volume_1'])

        # get total Zeppelin signal contribution
        vf_extra = (
            (1 - fitted_parameters['SD1WatsonDistributed_1_partial_volume_0'])
            * fitted_parameters['partial_volume_1'])
        vf_iso = fitted_parameters['partial_volume_0']
        odi = fitted_parameters['SD1WatsonDistributed_1_SD1Watson_1_odi']

    if bids_fmt:
        output_odi = output_dir + '/' + bids_id + '_model-NODDI_parameter-ODI.nii.gz'
        output_vf_intra = output_dir + '/' + bids_id + '_model-NODDI_parameter-ICVF.nii.gz'
        output_vf_extra = output_dir + '/' + bids_id + '_model-NODDI_parameter-EXVF.nii.gz'
        output_vf_iso = output_dir + '/' + bids_id + '_model-NODDI_parameter-ISO.nii.gz'
    else:
        output_odi = output_dir + '/noddi_ODI.nii.gz'
        output_vf_intra = output_dir + '/noddi_ICVF.nii.gz'
        output_vf_extra = output_dir + '/noddi_EXVF.nii.gz'
        output_vf_iso = output_dir + '/noddi_ISO.nii.gz'

    #Save the images
    odi_img = nib.Nifti1Image(odi, img.affine, img.header)
    odi_img.set_sform(img.get_sform())
    odi_img.set_qform(img.get_qform())
    nib.save(odi_img, output_odi)

    icvf_img = nib.Nifti1Image(vf_intra, img.affine, img.header)
    icvf_img.set_sform(img.get_sform())
    icvf_img.set_qform(img.get_qform())
    nib.save(icvf_img, output_vf_intra)

    ecvf_img = nib.Nifti1Image(vf_extra, img.affine, img.header)
    ecvf_img.set_sform(img.get_sform())
    ecvf_img.set_qform(img.get_qform())
    nib.save(ecvf_img, output_vf_extra)

    iso_img = nib.Nifti1Image(vf_iso, img.affine, img.header)
    iso_img.set_sform(img.get_sform())
    iso_img.set_qform(img.get_qform())
    nib.save(iso_img, output_vf_iso)
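
A hedged usage sketch of fit_noddi_dmipy (all file paths below are placeholders, not files from the original):

    fit_noddi_dmipy(input_dwi='dwi.nii.gz',
                    input_bval='dwi.bval',
                    input_bvec='dwi.bvec',
                    input_mask='brain_mask.nii.gz',
                    output_dir='noddi_out',
                    nthreads=4,
                    model_type='WATSON')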
Example No. 10
    mask = nib.load(allMaskNames[iMask]).get_fdata()

    ball = gaussian_models.G1Ball()
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle1 = SD1WatsonDistributed(models=[stick, zeppelin])

    print(watson_dispersed_bundle1.parameter_names)

    watson_dispersed_bundle1.set_tortuous_parameter('G2Zeppelin_1_lambda_perp', 'C1Stick_1_lambda_par', 'partial_volume_0')
    watson_dispersed_bundle1.set_equal_parameter('G2Zeppelin_1_lambda_par', 'C1Stick_1_lambda_par')
    watson_dispersed_bundle1.set_fixed_parameter('G2Zeppelin_1_lambda_par', 1.7e-9)

    watson_dispersed_bundle2 = watson_dispersed_bundle1.copy()
    NODDIx_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle1, watson_dispersed_bundle2])

    print(NODDIx_mod.parameter_names)

    NODDIx_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    NODDIx_fit = NODDIx_mod.fit(acq_scheme, dwi, mask=mask, solver='mix', maxiter=300)

    hdr = dwi_nii.header

    # create output folder
    dir_sub = os.path.join(directory, "%03d" % iMask)
    if not os.path.exists(dir_sub):
        os.mkdir(dir_sub)
        
    sphere = get_sphere(name='repulsion200').subdivide()
Example No. 11
    bvalues_SI = bvalues * 1e6 
    acq_scheme = acquisition_scheme_from_bvalues(bvalues_SI, gradient_directions_normalized, delta, Delta)
    # gtab_dipy = gradient_table(bvalues, gradient_directions, big_delta=Delta, small_delta=delta, atol=3e-2)
    # acq_scheme = gtab_dipy2mipy(gtab_dipy)

    acq_scheme.print_acquisition_info

    dwi_nii = nib.load(allDwiNames[iMask])
    dwi = dwi_nii.get_fdata()
    mask = nib.load(allMaskNames[iMask]).get_fdata()

    ball = gaussian_models.G1Ball()
    cylinder = cylinder_models.C4CylinderGaussianPhaseApproximation()
    gamma_cylinder = distribute_models.DD1GammaDistributed(models=[cylinder])

    axcaliber_gamma = MultiCompartmentModel(models=[ball, gamma_cylinder])
    print(axcaliber_gamma.parameter_cardinality)

    axcaliber_gamma.set_fixed_parameter('DD1GammaDistributed_1_C4CylinderGaussianPhaseApproximation_1_lambda_par', 1.7e-9)
    axcaliber_gamma.set_fixed_parameter('DD1GammaDistributed_1_C4CylinderGaussianPhaseApproximation_1_mu', [0, 0])

    axcaliber_gamma_fit = axcaliber_gamma.fit(acq_scheme, dwi, mask=mask, solver='mix', maxiter=100)

    fitted_parameters = axcaliber_gamma_fit.fitted_parameters

    hdr = dwi_nii.header

    # create output folder
    dir_sub = os.path.join(directory, "%03d" % iMask)
    if not os.path.exists(dir_sub):
        os.mkdir(dir_sub)
zeppelin = gaussian_models.G2Zeppelin()  #Extra-axonal diffusion

if model_type == 'Bingham' or model_type == 'BINGHAM':
    dispersed_bundle = SD2BinghamDistributed(models=[stick, zeppelin])
else:
    dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                        'C1Stick_1_lambda_par',
                                        'partial_volume_0')
dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                     'C1Stick_1_lambda_par')
dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                     parallel_diffusivity)

NODDI_mod = MultiCompartmentModel(models=[ball, dispersed_bundle])
NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', iso_diffusivity)
NODDI_fit = NODDI_mod.fit(acq_scheme,
                          data,
                          mask=mask_data,
                          number_of_processors=nthreads,
                          solver=solver)

fitted_parameters = NODDI_fit.fitted_parameters

if model_type == 'Bingham' or model_type == 'BINGHAM':
    # get total Stick signal contribution
    vf_intra = (fitted_parameters['SD2BinghamDistributed_1_partial_volume_0'] *
                fitted_parameters['partial_volume_1'])

    # get total Zeppelin signal contribution
    vf_extra = ((1 - fitted_parameters['SD2BinghamDistributed_1_partial_volume_0']) *
                fitted_parameters['partial_volume_1'])
Example no. 13
0
def ivim_Dstar_fixed(acquisition_scheme,
                     data,
                     mask=None,
                     Dstar_value=7e-9,
                     solver='brute2fine',
                     **fit_args):
    """
    Implementation of the second-best-performing IVIM algorithm from [1]_.
    It is a non-linear least-squares fit in which the blood pseudo-diffusivity
    Dstar is fixed to 7e-3 mm^2/s, a value reported to improve the stability
    of the fit in healthy volunteers [2]_.

    The optimization range for the tissue diffusivity is set to
    [0.5 - 6]e-3 mm^2/s to improve precision [3]_.

    In the fitted ivim_fit model, partial_volume_0 and G1Ball_1_lambda_iso
    represent the tissue fraction and diffusivity, and partial_volume_1 and
    G1Ball_2_lambda_iso represent the blood fraction and diffusivity.

    Parameters
    ----------
    acquisition_scheme: Dmipy AcquisitionScheme instance,
        acquisition scheme containing all the information of the ivim
        acquisition.
    data: ND-array of shape (Nx, ..., N_DWI),
        measured data corresponding to the acquisition scheme.
    mask : (N-1)-dimensional integer/boolean array of size (N_x, N_y, ...),
        Optional mask of voxels to be included in the optimization.
    Dstar_value: float,
        the fixed Dstar blood diffusivity value. Default: 7e-9 m^2/s [2]_.
    solver: string,
        which solver to use for the algorithm. Default: 'brute2fine'.
    fit_args: keyword arguments,
        other keyword arguments that are passed to the optimizer.

    Returns
    -------
    ivim_fit: Dmipy FittedMultiCompartmentModel instance,
        contains the fitted IVIM parameters.

    References
    ----------
    .. [1] Gurney-Champion, O. J., Klaassen, R., Froeling, M., Barbieri, S.,
        Stoker, J., Engelbrecht, M. R., ... & Nederveen, A. J. (2018).
        Comparison of six fit algorithms for the intra-voxel incoherent motion
        model of diffusion-weighted magnetic resonance imaging data of
        pancreatic cancer patients. PloS one, 13(4), e0194590.
    .. [2] Gurney-Champion OJ, Froeling M, Klaassen R, Runge JH, Bel A, Van
        Laarhoven HWM, et al. Minimizing the Acquisition Time for Intravoxel
        Incoherent Motion Magnetic Resonance Imaging Acquisitions in the Liver
        and Pancreas. Invest Radiol. 2016;51: 211–220.
    .. [3] Park HJ, Sung YS, Lee SS, Lee Y, Cheong H, Kim YJ, et al. Intravoxel
        incoherent motion diffusion-weighted MRI of the abdomen: The effect of
        fitting algorithms on the accuracy and reliability of the parameters.
        J Magn Reson Imaging. 2017;45: 1637–1647.
    """
    start = time()

    print('Starting IVIM Dstar-fixed algorithm.')
    ivim_mod = MultiCompartmentModel([G1Ball(), G1Ball()])
    ivim_mod.set_fixed_parameter('G1Ball_2_lambda_iso',
                                 Dstar_value)  # following [2]
    ivim_mod.set_parameter_optimization_bounds('G1Ball_1_lambda_iso',
                                               [.5e-9, 6e-9])  # following [3]
    ivim_fit = ivim_mod.fit(acquisition_scheme=acquisition_scheme,
                            data=data,
                            mask=mask,
                            solver=solver,
                            **fit_args)
    computation_time = time() - start
    N_voxels = np.sum(ivim_fit.mask)
    msg = 'IVIM Dstar-fixed optimization of {0:d} voxels'.format(N_voxels)
    msg += ' complete in {0:.3f} seconds'.format(computation_time)
    print(msg)
    return ivim_fit
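
# A hedged usage sketch: `scheme` and `data` stand in for an IVIM acquisition
# scheme and its measured signal and are assumptions, not part of the function
# above; the parameter names follow the docstring.
ivim_fit = ivim_Dstar_fixed(scheme, data, mask=data[..., 0] > 0)
f_blood = ivim_fit.fitted_parameters['partial_volume_1']  # blood fraction
D_tissue = ivim_fit.fitted_parameters['G1Ball_1_lambda_iso']  # tissue diffusivity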
Example no. 14
0
def main():
    # Plot Save Path
    base_plot_path = r'/nfs/masi/nathv/py_src_code_2020/dmipy_model_pictures'
    base_plot_path = os.path.normpath(base_plot_path)

    # Method Saving Paths
    base_save_path = r'/nfs/masi/nathv/miccai_2020/micro_methods_hcp_mini'
    base_save_path = os.path.normpath(base_save_path)

    # Create base saving path for Method
    # TODO The Method name can be made an argument later on
    method_name = 'VERDICT'

    # Base HCP Data Path
    base_data_path = r'/nfs/HCP/data'
    base_data_path = os.path.normpath(base_data_path)

    # Subject ID's list
    subj_ID_List = ['125525', '118225', '116726', '115825', '115017', '114823']
    # TODO When needed loop here over the ID list
    for subj_ID in subj_ID_List:
        # Subject Save Path
        subj_save_path = os.path.join(base_save_path, subj_ID)
        if not os.path.exists(subj_save_path):
            os.mkdir(subj_save_path)

        # TODO For later the subject data, bval and bvec reading part can be put inside a function
        subj_data_path = os.path.join(base_data_path, subj_ID, 'T1w',
                                      'Diffusion')

        # Read the Nifti file, bvals and bvecs
        subj_bvals = np.loadtxt(os.path.join(subj_data_path, 'bvals'))
        subj_bvecs = np.loadtxt(os.path.join(subj_data_path, 'bvecs'))

        all_bvals = subj_bvals * 1e6
        all_bvecs = np.transpose(subj_bvecs)

        subj_Acq_Scheme = acquisition_scheme_from_bvalues(all_bvals,
                                                          all_bvecs,
                                                          delta=10.6 * 1e-3,
                                                          Delta=43.1 * 1e-3,
                                                          TE=89.5 * 1e-3)
        print(subj_Acq_Scheme.print_acquisition_info)

        print('Loading the Nifti Data ...')
        data_start_time = time.time()

        subj_babel_object = nib.load(
            os.path.join(subj_data_path, 'data.nii.gz'))
        subj_data = subj_babel_object.get_fdata()
        axial_slice_data = subj_data[55:60, 65:70, 60:62, :]

        data_end_time = time.time()
        data_time = int(np.round(data_end_time - data_start_time))

        print('Data Loaded ... Time Taken: {}'.format(data_end_time -
                                                      data_start_time))
        print('The Data Dimensions are: {}'.format(subj_data.shape))

        #### Verdict Begin ####
        sphere = sphere_models.S4SphereGaussianPhaseApproximation(
            diffusion_constant=0.9e-9)
        ball = gaussian_models.G1Ball()
        stick = cylinder_models.C1Stick()

        verdict_mod = MultiCompartmentModel(models=[sphere, ball, stick])

        verdict_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 0.9e-9)
        verdict_mod.set_parameter_optimization_bounds('C1Stick_1_lambda_par',
                                                      [3.05e-9, 10e-9])

        print('Fitting the Verdict Model ...')
        fit_start_time = time.time()
        mcdmi_fit = verdict_mod.fit(subj_Acq_Scheme,
                                    axial_slice_data,
                                    mask=axial_slice_data[..., 0] > 0,
                                    solver='mix',
                                    use_parallel_processing=False)
        fit_end_time = time.time()
        print('Model Fitting Completed ... Time Taken to fit: {}'.format(
            fit_end_time - fit_start_time))
        fit_time = int(np.round(fit_end_time - fit_start_time))

        fitted_parameters = mcdmi_fit.fitted_parameters

        # Get List of Estimated Parameter Names
        para_Names_list = list(fitted_parameters.keys())

        ### Nifti Saving Part
        # Create a directory per subject
        subj_method_save_path = os.path.join(subj_save_path, method_name)
        if not os.path.exists(subj_method_save_path):
            os.mkdir(subj_method_save_path)

        # Retrieve the affine from already Read Nifti file to form the header
        affine = subj_babel_object.affine

        # Loop over fitted parameters name list
        for each_fitted_parameter in para_Names_list:
            new_img = nib.Nifti1Image(fitted_parameters[each_fitted_parameter],
                                      affine)

            # Form the file path
            f_name = each_fitted_parameter + '.nii.gz'
            param_file_path = os.path.join(subj_method_save_path, f_name)

            nib.save(new_img, param_file_path)

    return None
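
# Hedged follow-up sketch: dmipy assigns 'partial_volume_i' in model-list
# order, so with models=[sphere, ball, stick] the three VERDICT fractions can
# be named for readability. `fitted_parameters` is the dictionary computed in
# main() above; reusing it here is an assumption for illustration only.
f_ic = fitted_parameters['partial_volume_0']    # intracellular (sphere)
f_ees = fitted_parameters['partial_volume_1']   # extracellular-extravascular (ball)
f_vasc = fitted_parameters['partial_volume_2']  # vascular (stick)
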
def test_tissue_response_model_multi_compartment_models():
    ball = G1Ball(lambda_iso=2.5e-9)
    data_iso = ball(scheme)
    data_iso_sm = ball.spherical_mean(scheme)
    S0_iso, iso_model = estimate_TR1_isotropic_tissue_response_model(
        scheme, np.atleast_2d(data_iso))

    zeppelin = G2Zeppelin(lambda_par=1.7e-9,
                          lambda_perp=1e-9,
                          mu=[np.pi / 2, np.pi / 2])
    data_aniso = zeppelin(scheme)
    data_aniso_sm = zeppelin.spherical_mean(scheme)
    S0_aniso, aniso_model = estimate_TR2_anisotropic_tissue_response_model(
        scheme, np.atleast_2d(data_aniso))
    models = [iso_model, aniso_model]

    mc = MultiCompartmentModel(models)
    mc_smt = MultiCompartmentSphericalMeanModel(models)

    test_mc_data = 0.5 * data_iso + 0.5 * data_aniso
    test_mc_data_sm = 0.5 * data_iso_sm + 0.5 * data_aniso_sm
    test_data = [test_mc_data, test_mc_data_sm]

    params = {
        'partial_volume_0': [0.5],
        'partial_volume_1': [0.5],
        'TR2AnisotropicTissueResponseModel_1_mu':
        np.array([np.pi / 2, np.pi / 2])
    }

    mc_models = [mc, mc_smt]
    for model, data in zip(mc_models, test_data):
        data_mc = model(scheme, **params)
        assert_array_almost_equal(data, data_mc, 3)

    # csd model with single models
    mc_csd = MultiCompartmentSphericalHarmonicsModel([aniso_model])
    watson_mod = distribute_models.SD1WatsonDistributed([aniso_model])
    watson_params = {
        'SD1Watson_1_mu': np.array([np.pi / 2, np.pi / 2]),
        'SD1Watson_1_odi': .3
    }
    data_watson = watson_mod(scheme, **watson_params)
    mc_csd_fit = mc_csd.fit(scheme, data_watson)
    assert_array_almost_equal(mc_csd_fit.predict()[0], data_watson, 2)

    # csd model with multiple models
    mc_csd = MultiCompartmentSphericalHarmonicsModel(models)
    watson_mod = distribute_models.SD1WatsonDistributed(
        [iso_model, aniso_model])
    watson_params = {
        'SD1Watson_1_mu': np.array([np.pi / 2, np.pi / 2]),
        'SD1Watson_1_odi': .3,
        'partial_volume_0': 0.5
    }
    data_watson = watson_mod(scheme, **watson_params)
    mc_csd_fit = mc_csd.fit(scheme, data_watson)
    assert_array_almost_equal(mc_csd_fit.predict()[0], data_watson, 2)

    scheme_panagiotaki = panagiotaki_verdict_acquisition_scheme()
    assert_raises(ValueError,
                  mc_csd.fit,
                  acquisition_scheme=scheme_panagiotaki,
                  data=data_watson)
Example no. 16
0
    stick = cylinder_models.C1Stick()
    zeppelin = gaussian_models.G2Zeppelin()

    watson_dispersed_bundle = SD1WatsonDistributed(models=[stick, zeppelin])

    print(watson_dispersed_bundle.parameter_names)

    watson_dispersed_bundle.set_tortuous_parameter('G2Zeppelin_1_lambda_perp',
                                                   'C1Stick_1_lambda_par',
                                                   'partial_volume_0')
    watson_dispersed_bundle.set_equal_parameter('G2Zeppelin_1_lambda_par',
                                                'C1Stick_1_lambda_par')
    watson_dispersed_bundle.set_fixed_parameter('G2Zeppelin_1_lambda_par',
                                                1.7e-9)

    NODDI_mod = MultiCompartmentModel(models=[ball, watson_dispersed_bundle])

    print(NODDI_mod.parameter_names)

    NODDI_mod.set_fixed_parameter('G1Ball_1_lambda_iso', 3e-9)

    NODDI_fit_hcp = NODDI_mod.fit(acq_scheme, dwi, mask=mask)

    fitted_parameters = NODDI_fit_hcp.fitted_parameters

    hdr = dwi_nii.header

    # create output folder
    dir_sub = os.path.join(directory, "%03d" % iMask)
    if not os.path.exists(dir_sub):
        os.mkdir(dir_sub)
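
    # A minimal saving sketch (an assumed continuation of the fragment above):
    # write each fitted NODDI map into the per-mask output folder, reusing the
    # affine and header of the input DWI.
    for name, volume in fitted_parameters.items():
        out_img = nib.Nifti1Image(volume.astype(np.float32), dwi_nii.affine, hdr)
        nib.save(out_img, os.path.join(dir_sub, name + '.nii.gz'))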