Example #1
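# Assumed imports for these examples (a sketch: gf, gmr, geo_treat and stats are
# project-specific helper modules referenced by the code below; their exact
# import paths are not shown here):
#     import os, shutil, time
#     import numpy as np
#     import matplotlib.pyplot as plt
#     from sklearn.decomposition import PCA
#     from sklearn.mixture import GaussianMixture as GMM   # assumed alias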
def Wall_radius_pod(thetas,
                    liste_r_theta,
                    epsilon_wall=1.e-2,
                    epsilon_radius=1.e-2,
                    read_file=True):

    # NOTE: the first time you use this function, call it with read_file=False so
    # that the matrices MAT and RAYON are actually computed. This computation can
    # be very long, so the matrices are automatically written to disk by the
    # np.savetxt calls below. On later calls you can set read_file=True to simply
    # reload them from those files.

    print("----> Creation Matrix of coefficients")
    if not read_file:
        #SET FOURIER SERIES ORDER
        f_order = 5
        #COMPUTE MATRICES WALL SHAPE AND RADIUS
        MAT, RAYON = Matrice_coefficients_wall(thetas,
                                               liste_r_theta,
                                               fourier_order=f_order)
        np.savetxt("files/MATRIX_WALL.csv", MAT, delimiter=', ')
        np.savetxt("files/MATRIX_RAYON.csv", MAT, delimiter=', ')
    else:
        print("------> Reading existing file")
        MAT = gf.Read_parametrization("files/MATRIX_WALL.csv")
        RAYON = gf.Read_parametrization("files/MATRIX_RADIUS.csv")

    print("----> Creation of wall reduced basis")
    print("------> Epsilon Wall : ", epsilon_wall)
    PHI_WALL = Proper_orthogonal_decomposition(MAT,
                                               epsilon_wall,
                                               nb_mods=False,
                                               plot=False)
    #err_one_left_wall = One_left_out(MAT, epsilon_wall, plot = True)
    #print("------> Mean of one left out error : ", err_one_left_wall)

    print("----> Creation of radius reduced basis")
    print("------> Epsilon Radius : ", epsilon_radius)
    PHI_RAYON = Proper_orthogonal_decomposition(RAYON,
                                                epsilon_radius,
                                                nb_mods=False,
                                                plot=False)
    #err_one_left_radius= One_left_out(RAYON, epsilon_radius, plot = True)
    #print("------> Mean of one left out error : ", err_one_left_radius)

    #MAT_A, MAT_B = Fourier_variations(MAT)
    #PHI_A = Proper_orthogonal_decomposition(MAT_A, 1.e-2, nb_mods = False, plot = False)
    #PHI_B = Proper_orthogonal_decomposition(MAT_B, 1.e-1, nb_mods = False, plot = False)

    print("----> Compute solution's coefficients of both reduced basis")
    COEFFS_WALL = Coefficients_base_pod(MAT, PHI_WALL)
    COEFFS_RAYON = Coefficients_base_pod(RAYON, PHI_RAYON)

    #COEFFS_A = Coefficients_base_pod(MAT_A, PHI_A)
    #COEFFS_B = Coefficients_base_pod(MAT_B, PHI_B)

    return PHI_WALL, PHI_RAYON, COEFFS_WALL, COEFFS_RAYON
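

# Hedged usage sketch (assumption: synthetic r(theta) tables stand in for real
# patient parametrizations, and the "files/" folder exists so the matrices can
# be written on the first pass).
def _example_wall_radius_pod():
    thetas = np.linspace(0, 2 * np.pi, 160)
    # five synthetic "patients", each with 200 sections of 160 radius samples
    liste_r_theta = [10.0 + np.random.rand(200, 160) for _ in range(5)]
    # read_file=False on the first run so MAT and RAYON are computed and stored
    PHI_WALL, PHI_RAYON, COEFFS_WALL, COEFFS_RAYON = Wall_radius_pod(
        thetas, liste_r_theta,
        epsilon_wall=1.e-2, epsilon_radius=1.e-2,
        read_file=False)
    print(np.shape(PHI_WALL), np.shape(PHI_RAYON))
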
def Parametrization_analysis(path_parametrization):

    PARA = gf.Read_parametrization(path_parametrization)

    #WE ALWAYS BEGIN PARAMETRIZATION WITH HIGHEST Z COORDINATES
    PZ1 = PARA[0, 2]
    PZ2 = PARA[-1, 2]
    if PZ2 > PZ1:
        PARA = np.flip(PARA, axis=0)

    #COMPUTE THETAS
    print("-----> Number of thetas : ", np.shape(PARA[:, 12:])[1])

    #COMPUTE NB POINT CENTERLINE
    print("-----> Number of centerline's points : ", np.shape(PARA)[0])

    #COMPUTE AVERAGE_RADIUS
    average_radius = np.mean(PARA[:, 12:])
    print("-----> The average radius is {} mm. ".format(
        round(average_radius, 2)))

    #COMPUTE ARC LENGTH
    arc_length = stats.arc_length(1.0, PARA[:, 0:3])
    print("-----> The arc_length is {} mm. ".format(round(arc_length, 2)))

    return PARA, average_radius, arc_length
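

# Hedged usage sketch (assumption: the path below is a hypothetical
# parametrization CSV with the layout used above, i.e. columns 0-2 hold the
# centerline x, y, z and columns 12 onwards hold the r(theta) samples).
def _example_parametrization_analysis():
    path = "files/patient_parametrization.csv"   # hypothetical file name
    PARA, average_radius, arc_length = Parametrization_analysis(path)
    print("Average radius (mm) :", round(average_radius, 2))
    print("Arc length (mm) :", round(arc_length, 2))
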
Example #3
def Wall_coeff_evolution_pod(thetas,
                             liste_r_theta,
                             epsilon_wall=1.e-2,
                             read_file=True):

    print("----> Creation Matrix of coefficients")
    if not read_file:
        #SET FOURIER SERIES ORDER
        f_order = 5
        #COMPUTE MATRICES WALL SHAPE AND RADIUS
        MAT, RAYON = Matrice_coefficients_wall(thetas,
                                               liste_r_theta,
                                               fourier_order=f_order)
        np.savetxt("files/MATRIX_WALL.csv", MAT, delimiter=', ')
        np.savetxt("files/MATRIX_RAYON.csv", MAT, delimiter=', ')

    else:
        print("------> Reading existing file")
        MAT = gf.Read_parametrization("files/MATRIX_WALL.csv")

    liste_mat_coeff = Liste_coeff_fourier(MAT)

    liste_pod = []
    liste_pod_coeff = []

    for i in range(len(liste_mat_coeff)):
        print("----> Creation of reduced basis for Fourier coeff {}".format(i))
        print("------> Epsilon Wall : ", epsilon_wall)
        COEFF = liste_mat_coeff[i]
        PHI_COEFF = Proper_orthogonal_decomposition(COEFF,
                                                    epsilon_wall,
                                                    nb_mods=False,
                                                    plot=False)
        liste_pod.append(PHI_COEFF)

        print(
            "----> Compute solution's coefficients of reduced basis {}".format(
                i))
        COEFF_POD = Coefficients_base_pod(COEFF, PHI_COEFF)
        liste_pod_coeff.append(COEFF_POD)

    return liste_pod, liste_pod_coeff
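

# Hedged usage sketch (assumption: "files/MATRIX_WALL.csv" was written by a
# previous run, so read_file=True only reloads it; liste_r_theta is then unused).
def _example_wall_coeff_evolution_pod():
    thetas = np.linspace(0, 2 * np.pi, 160)
    liste_pod, liste_pod_coeff = Wall_coeff_evolution_pod(
        thetas, liste_r_theta=[], epsilon_wall=1.e-2, read_file=True)
    print("Number of Fourier coefficients with a reduced basis :", len(liste_pod))
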
def Geo_reduction_wall_radius(path_patients_data,
                              nb_generation=20,
                              rot_sec=False,
                              mesh_extractor=False):

    print("---------------------------------- START PROGRAMM ----------------------------------")

    #############################################################################
    ################ TRAINING DATA READING AND EXTRACTION #######################
    #############################################################################

    # EXTRACT DATA FROM FOLDER "path_patients_data". READ ALL PARAMETRIZATION FILES.
    print("--> DATAS EXTRACTION FROM PARAMETRIZATION FILES")
    folder = sorted([f for f in os.listdir(path_patients_data) if not f.startswith('.')], key = str.lower)
    print("----> Number of patients considered : {}".format(len(folder)))

    liste_control = []
    liste_r_theta = []
    liste_originale_centerline = []

    thetas = np.linspace(0, 2*np.pi, 160)

    #WE LOOP ON EACH PARAMETRIZATION FILE AND EXTRACT RELEVANT INFORMATION
    for i in folder:
        #CHANGE path IF YOU GAVE ANOTHER NAME TO THE PARAMETRIZATION FILE
        path = path_patients_data + '/' + i + '/' + i + '_parametrization.csv'
        PARA = gf.Read_parametrization(path)

        #EXTRACTION OF THE CENTERLINE
        CENTERLINE = PARA[:, 0:3]
        liste_originale_centerline.append(CENTERLINE)

        #EXTRACTION OF CONTROL POINTS FOR PROCRUSTES ANALYSIS AND B-SPLINE RECONSTRUCTION
        extract = np.linspace(0, np.shape(CENTERLINE)[0] - 1, 10, dtype='int')
        CONTROL = CENTERLINE[extract, :]
        liste_control.append(CONTROL)

        #EXTRACTION OF THE FUNCTION R(THETA) : RADIUS AS A FUNCTION OF ANGLE
        r_theta = PARA[:, 12:]
        liste_r_theta.append(r_theta)

    #UNCOMMENT NEXT LINE IF YOU WANT TO STORE ORIGINAL CENTERLINE IN A FILE
    #gf.Write_csv("ORIGINAL_CENTERLINES.csv", np.vstack(liste_originale_centerline), "x, y, z")
    #############################################################################
    #############################################################################
    #############################################################################

    #############################################################################
    ################ PARAMETERS FOR GEOMETRIC MODEL REDUCTION ###################
    #############################################################################

    #EPSILON TO AUTOMATICALLY EXTRACT NUMBER OF POD MODES
    epsilon_c = 1.e-3
    epsilon_w = 1.e-3
    epsilon_r = 1.e-3
    #PARAMETERS TO RECONSTRUCT ANEURYSMS
    nb_sections = 500
    nb_thetas = 160

    #############################################################################
    #############################################################################
    #############################################################################

    print("")
    print("--> EXTRACTION CENTERLINE REDUCED BASIS")

    dilatation, PHI_CENTERLINE, COEFFS_CENTERLINE = gmr.Centerline_pod(
        liste_control, epsilon=epsilon_c)

    print("")
    print("--> EXTRACTION WALL AND RADIUS REDUCED BASIS")

    PHI_WALL, PHI_RAYON, COEFFS_WALL, COEFFS_RAYON = gmr.Wall_radius_pod(
        thetas, liste_r_theta,
        epsilon_wall=epsilon_w, epsilon_radius=epsilon_r,
        read_file=True)

    print("")
    print("--> GENERATION OF RANDOM AORTIC ANEURYSMS GEOMETRIES")

    if rot_sec:

        path_to_save = "results/generation_wall_radius_with_rotation"
        if os.path.exists(path_to_save):
            shutil.rmtree(path_to_save)
        os.makedirs(path_to_save)

        for i in range(nb_generation):

            print("----> GENERATED ANEURYSM NUMBER : ", i)

            #NAME UNDER WHICH THE ANEURYSM IS STORED IN THE FOLDER path_to_save
            name = "ANEURYSM_WALL_RADIUS_" + str(i)

            ANEVRISME, ANEVRISME_ROT = gmr.Generator_wall_radius(
                PHI_CENTERLINE, PHI_WALL, PHI_RAYON,
                COEFFS_CENTERLINE, COEFFS_WALL, COEFFS_RAYON,
                dilatation, nb_sections, nb_thetas,
                rotation_section=True)
            gf.Write_csv(path_to_save + "/reconstruction_" + name + ".csv",
                         ANEVRISME, "x, y, z")
            gf.Write_csv(path_to_save + "/reconstruction_rotation_" + name + ".csv",
                         ANEVRISME_ROT, "x, y, z")

            if mesh_extractor:
                CONTOUR = ANEVRISME[nb_sections:, :]
                geo_treat.Mesh_generation(CONTOUR,
                                          path_to_save + "/mesh_contour_" + name + ".stl",
                                          nb_sections, nb_thetas)
                geo_treat.Read_and_Smooth(path_to_save + "/mesh_contour_" + name + ".stl",
                                          path_to_save + "/mesh_smooth_" + name + ".stl",
                                          coeff_smooth=0.001)

                CONTOUR2 = ANEVRISME_ROT[nb_sections:, :]
                geo_treat.Mesh_generation(CONTOUR2,
                                          path_to_save + "/mesh_contour_rotation_" + name + ".stl",
                                          nb_sections, nb_thetas)
                geo_treat.Read_and_Smooth(path_to_save + "/mesh_contour_rotation_" + name + ".stl",
                                          path_to_save + "/mesh_smooth_rotation_" + name + ".stl",
                                          coeff_smooth=0.001)

                #UNCOMMENT NEXT LINE IF YOU WANT TO REMESH THE FILES (WARNING : CAN BE VERY LONG TO COMPUTE!)
                #geo_treat.Surface_Remesh(path_to_save + "/mesh_smooth_" + name + ".stl", path_to_save + "/mesh_remesh_" + name + ".stl", target_edge_length = 0.3, nb_iterations = 10)

    else:

        path_to_save = "results/generation_wall_radius_no_rotation"
        if os.path.exists(path_to_save):
            shutil.rmtree(path_to_save)
        os.makedirs(path_to_save)

        for i in range(nb_generation):

            print("----> GENERATED ANEURYSM NUMBER : ", i)

            #NAME UNDER WHICH THE ANEURYSM IS STORED IN THE FOLDER path_to_save
            name = "ANEURYSM_WALL_RADIUS_" + str(i)

            ANEVRISME = gmr.Generator_wall_radius(
                PHI_CENTERLINE, PHI_WALL, PHI_RAYON,
                COEFFS_CENTERLINE, COEFFS_WALL, COEFFS_RAYON,
                dilatation, nb_sections, nb_thetas,
                rotation_section=False)
            gf.Write_csv(path_to_save + "/reconstruction_" + name + ".csv",
                         ANEVRISME, "x, y, z")

            if mesh_extractor:
                CONTOUR = ANEVRISME[nb_sections:, :]
                geo_treat.Mesh_generation(CONTOUR,
                                          path_to_save + "/mesh_contour_" + name + ".stl",
                                          nb_sections, nb_thetas)
                geo_treat.Read_and_Smooth(path_to_save + "/mesh_contour_" + name + ".stl",
                                          path_to_save + "/mesh_smooth_" + name + ".stl",
                                          coeff_smooth=0.001)

                #UNCOMMENT NEXT LINE IF YOU WANT TO REMESH THE FILES (WARNING : CAN BE VERY LONG TO COMPUTE!)
                #geo_treat.Surface_Remesh(path_to_save + "/mesh_smooth_" + name + ".stl", path_to_save + "/mesh_remesh_" + name + ".stl", target_edge_length = 0.3, nb_iterations = 10)

    return 0
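

# Hedged usage sketch (assumption: "datas/patients" is a hypothetical folder with
# one sub-folder per patient, each containing <patient>_parametrization.csv).
def _example_geo_reduction_wall_radius():
    Geo_reduction_wall_radius("datas/patients",
                              nb_generation=5,
                              rot_sec=False,
                              mesh_extractor=False)
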
def Main_generative_algorithm(path_patients_data):

    print("Start Generation Algorithm \n")

    start_time = time.time()

    #READ THE FOLDER
    folder = sorted(
        [f for f in os.listdir(path_patients_data) if not f.startswith('.')],
        key=str.lower)
    print("Number of patients data files in folder : {} \n".format(
        len(folder)))

    # Names of the folders where the generated surfaces are going to be stored
    path_datas = "results/gen_surfaces/training_set"
    path_datas_random = "results/gen_surfaces/predict_set"

    print(" -> Creation of folder named {} to store all generative surfaces", )
    dir1 = path_datas
    if os.path.exists(dir1):
        shutil.rmtree(dir1)
    os.makedirs(dir1)
    dir2 = path_datas_random
    if os.path.exists(dir2):
        shutil.rmtree(dir2)
    os.makedirs(dir2)

    # creating surfaces training set
    training_set = []
    liste_control = []

    #READ EACH FILE IN FOLDER
    for file in folder:

        print("Creating Generative Surface from patient {} surface ".format(
            file))

        # name of the .csv file that contains the radii
        surf_file_name = path_patients_data + '/' + file + '/' + file + '_parametrization.csv'

        # loading .csv file (surf_data is of type 'float64')
        #print("surf_file_name: ", surf_file_name, "\n")
        PARA = gf.Read_parametrization(surf_file_name)

        CENTERLINE = PARA[:, 0:3]
        extract = np.linspace(0, np.shape(CENTERLINE)[0] - 1, 10, dtype='int')
        CONTROL = CENTERLINE[extract, :]
        liste_control.append(CONTROL)

        RADIUS = PARA[:, 12:]
        n_point = np.shape(RADIUS)[0]
        n_radius = np.shape(RADIUS)[1]

        # preparing mesh for plotting
        nb_centerline_points = np.linspace(0,
                                           n_point,
                                           n_point,
                                           endpoint=True,
                                           dtype=int)
        nb_thetas = np.linspace(0,
                                n_radius,
                                n_radius,
                                endpoint=True,
                                dtype=int)
        X, Y = np.meshgrid(nb_centerline_points, nb_thetas)
        Z = RADIUS

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.plot_surface(X, Y, Z.T,
                        cmap='ocean')  # Remember:  rows, cols = Z.shape
        #plt.show()

        # surface plot file name:
        surf_plot_name = path_datas + '/' + file + '_surface.png'
        plt.savefig(surf_plot_name)
        # view from above (X-Y plane)
        #ax.view_init(90, 90)
        # surface plot seen from above file name:
        #surf_plot_name = surf_folder_name + '/' + name_file + '_surface_XY.png'
        #plt.savefig(surf_plot_name)

        # add surface file to list for generation algorithm
        training_set.append(RADIUS.ravel())

    ######## CENTERLINE ##############
    print(
        "Procrustes Analysis + Save dilatation coefficient + Creation Matrix of coefficients"
    )
    #EXTRACT CONTROL POINTS FROM PROCRUSTES ANALYSIS
    procrust_control, disparity = gmr.Procrustes(liste_control)
    #EXTRACT DILATATION (SCALING) FROM PROCRUSTES ANALYSIS
    dilatation = gmr.Dilatation(liste_control)
    DILATATION = np.asarray(dilatation).reshape(-1, 1)
    print("Size of dilatation training set : ", np.shape(DILATATION))
    #RECONSTRUCT NEW CENTERLINES FROM PROCRUSTES CONTROL POINTS
    procrust_bspline = gmr.Construction_bsplines(procrust_control, 200, 5)
    for i in range(len(procrust_bspline)):
        SPLINE = procrust_bspline[i]
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.scatter(SPLINE[:, 0], SPLINE[:, 1], SPLINE[:, 2])
        spline_plot_name = path_datas + '/' + str(i) + '_spline.png'
        plt.savefig(spline_plot_name)

    #DEGREE OF POLYNOMIAL APPROXIMATION
    degree = 5
    #CREATE MATRIX ON WHICH TO RUN POD OF POLYNOMIAL COEFFICIENTS
    print("Degree polynomial approximation : ", degree)
    TRAIN_COEFF = gmr.Matrice_coefficients_centerline(procrust_bspline,
                                                      degree_approx=degree)
    print("Size of coefficient training set ", np.shape(TRAIN_COEFF))

    ######### RADIUS ##################
    TRAIN_RADIUS = np.vstack(training_set)
    print("Size of the radius training set : ", np.shape(TRAIN_RADIUS))

    TRAIN = np.hstack((DILATATION, TRAIN_COEFF.T, TRAIN_RADIUS))
    print("Size of the full training set : ", np.shape(TRAIN))

    ##### DIMENSIONALITY REDUCTION ##########
    pca = PCA(0.99999, whiten=True, svd_solver='full')
    REDUCED = pca.fit_transform(TRAIN)
    print('PCA : shape of the reduced dataset: ', np.shape(REDUCED))

    ##### PERFORM AIC TO SEARCH BEST NUMBER OF COMPONENTS ################
    min_n_components = 1
    max_n_components = np.shape(REDUCED)[0]
    n_components = np.arange(min_n_components, max_n_components, 3)
    models = [
        GMM(n, covariance_type='full', random_state=0) for n in n_components
    ]
    aics = [model.fit(REDUCED).aic(REDUCED) for model in models]

    fig = plt.figure()
    plt.plot(n_components, aics)
    #plt.show()
    plt.savefig(path_datas + '/' +
                'AIC_graph.png')  # can hide DeprecationWarning

    mini = np.argmin(aics)
    best_nb_components = n_components[mini]
    print("Best number of components is : ", best_nb_components)

    ##### PERFORM GMM WITH BEST NUMBER COMPONENTS #######################
    gmm = GMM(best_nb_components, covariance_type='full', random_state=0)
    gmm.fit(REDUCED)
    print('Convergence of the GMM fit to the reduced dataset: ',
          gmm.converged_)

    # n_sample: sample of new surfaces
    n_sample = 30
    # sklearn's GaussianMixture.sample returns (samples, component labels)
    DATA_NEW, _ = gmm.sample(n_sample)
    print('Shape of random data : ', np.shape(DATA_NEW))

    # inverse transform of the PCA object to construct the new surfaces
    NEW = pca.inverse_transform(DATA_NEW)
    print('Shape of random data after inverse PCA : ', np.shape(NEW))

    thetas = np.linspace(0, 2 * np.pi, n_radius)
    t_anevrisme = np.linspace(0, 1, 1000)

    for i in range(n_sample):

        print("Saving sample {} and create aneurysm".format(i))

        SAMPLE = NEW[i, :]
        DILATATION_SAMPLE = SAMPLE[0]
        CENTERLINE_SAMPLE = SAMPLE[1:3 * (degree + 1) + 1]
        RADIUS_SAMPLE = SAMPLE[3 * (degree + 1) + 1:]

        ### CENTERLINE ##############
        step = int(len(CENTERLINE_SAMPLE) / 3)
        coeffs_x = CENTERLINE_SAMPLE[0:step]
        coeffs_y = CENTERLINE_SAMPLE[step:2 * step]
        coeffs_z = CENTERLINE_SAMPLE[2 * step:]
        px = np.poly1d(coeffs_x)
        py = np.poly1d(coeffs_y)
        pz = np.poly1d(coeffs_z)
        der1x = np.polyder(px, m=1)
        der1y = np.polyder(py, m=1)
        der1z = np.polyder(pz, m=1)
        der2x = np.polyder(px, m=2)
        der2y = np.polyder(py, m=2)
        der2z = np.polyder(pz, m=2)

        COORD = np.zeros((len(t_anevrisme), 3))
        COORD[:, 0] = px(t_anevrisme)
        COORD[:, 1] = py(t_anevrisme)
        COORD[:, 2] = pz(t_anevrisme)
        COORD *= DILATATION_SAMPLE
        print("------> Arc Length of the centerline : ",
              stats.arc_length(1, COORD))
        BSPLINE, TAN, NOR, BI = stats.frenet_frame(COORD)

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.scatter(BSPLINE[:, 0], BSPLINE[:, 1], BSPLINE[:, 2])
        bspline_plot_name = path_datas_random + '/random_bspline_' + str(
            i) + '.png'
        plt.savefig(bspline_plot_name)

        ### RADIUS ##################
        RADIUS_SAMPLE = RADIUS_SAMPLE.reshape(n_point, n_radius)
        #np.savetxt(path_datas + '/random_' + str(i) + ".csv", SAMPLE, delimiter = ',')

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.plot_surface(X, Y, RADIUS_SAMPLE.T,
                        cmap='ocean')  # Remember:  rows, cols = Z.shape
        surf_plot_name = path_datas_random + '/random_radius_' + str(
            i) + '.png'
        plt.savefig(surf_plot_name)

        liste_contour = []

        for k in range(len(COORD)):

            R_THETA = RADIUS_SAMPLE[k, :]
            TAB = np.hstack((thetas[np.newaxis].T, R_THETA[np.newaxis].T))

            #COORD
            C = BSPLINE[k, :]
            T = TAN[k, :]
            N = NOR[k, :]
            B = BI[k, :]

            #RECONSTRUCTION OF SHAPE OF THE SECTION
            PASSAGE = gmr.Matrice_de_passage(T, N, B)
            COORD_PLAN = ((np.dot(PASSAGE.T, C.T)).T)
            CONTOUR = gmr.Reconstruction_contour(COORD_PLAN, TAB, PASSAGE)
            liste_contour.append(CONTOUR)

        L = np.vstack(liste_contour)
        ANEVRISME = np.vstack((BSPLINE, L))
        gf.Write_csv(
            path_datas_random + '/' + "RANDOM_ANEURYSM_{}.csv".format(i),
            ANEVRISME, "x, y, z")

    end_time = time.time()
    print("Total time ", round(end_time - start_time, 2))
    print("")

    return 0
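

# Hedged usage sketch (assumption: same hypothetical patient folder layout as
# above; the results/gen_surfaces folders are recreated by the function itself).
def _example_main_generative_algorithm():
    Main_generative_algorithm("datas/patients")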