Example #1
def Wall_radius_pod(thetas,
                    liste_r_theta,
                    epsilon_wall=1.e-2,
                    epsilon_radius=1.e-2,
                    read_file=True):

    #THE FIRST TIME YOU USE THIS FUNCTION YOU MUST SET read_file = False.
    #COMPUTING THE MATRICES MAT AND RAYON CAN BE VERY LONG, SO ONCE COMPUTED
    #THEY ARE AUTOMATICALLY SAVED TO DISK (np.savetxt CALLS BELOW). ON
    #SUBSEQUENT CALLS SET read_file = True TO SIMPLY RELOAD THEM.

    print("----> Creation Matrix of coefficients")
    if not read_file:
        #SET FOURIER SERIES ORDER
        f_order = 5
        #COMPUTE MATRICES WALL SHAPE AND RADIUS
        MAT, RAYON = Matrice_coefficients_wall(thetas,
                                               liste_r_theta,
                                               fourier_order=f_order)
        np.savetxt("files/MATRIX_WALL.csv", MAT, delimiter=', ')
        np.savetxt("files/MATRIX_RADIUS.csv", RAYON, delimiter=', ')
    else:
        print("------> Reading existing file")
        MAT = gf.Read_parametrization("files/MATRIX_WALL.csv")
        RAYON = gf.Read_parametrization("files/MATRIX_RADIUS.csv")

    print("----> Creation of wall reduced basis")
    print("------> Epsilon Wall : ", epsilon_wall)
    PHI_WALL = Proper_orthogonal_decomposition(MAT,
                                               epsilon_wall,
                                               nb_mods=False,
                                               plot=False)
    #err_one_left_wall = One_left_out(MAT, epsilon_wall, plot = True)
    #print("------> Mean of one left out error : ", err_one_left_wall)

    print("----> Creation of radius reduced basis")
    print("------> Epsilon Radius : ", epsilon_radius)
    PHI_RAYON = Proper_orthogonal_decomposition(RAYON,
                                                epsilon_radius,
                                                nb_mods=False,
                                                plot=False)
    #err_one_left_radius= One_left_out(RAYON, epsilon_radius, plot = True)
    #print("------> Mean of one left out error : ", err_one_left_radius)

    #MAT_A, MAT_B = Fourier_variations(MAT)
    #PHI_A = Proper_orthogonal_decomposition(MAT_A, 1.e-2, nb_mods = False, plot = False)
    #PHI_B = Proper_orthogonal_decomposition(MAT_B, 1.e-1, nb_mods = False, plot = False)

    print("----> Compute solution's coefficients of both reduced basis")
    COEFFS_WALL = Coefficients_base_pod(MAT, PHI_WALL)
    COEFFS_RAYON = Coefficients_base_pod(RAYON, PHI_RAYON)

    #COEFFS_A = Coefficients_base_pod(MAT_A, PHI_A)
    #COEFFS_B = Coefficients_base_pod(MAT_B, PHI_B)

    return PHI_WALL, PHI_RAYON, COEFFS_WALL, COEFFS_RAYON
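

#Proper_orthogonal_decomposition above picks the number of POD modes from the
#threshold epsilon. A minimal, self-contained sketch of one common criterion
#(SVD truncation keeping enough modes so that the discarded energy stays below
#epsilon); this is an illustration only, not the project's implementation, and
#it assumes the snapshot matrix stores one sample per column.
def POD_sketch(SNAPSHOTS, epsilon=1.e-2):
    import numpy as np
    #SVD OF THE SNAPSHOT MATRIX
    U, S, VT = np.linalg.svd(SNAPSHOTS, full_matrices=False)
    #CUMULATIVE ENERGY CAPTURED BY THE FIRST k MODES
    energy = np.cumsum(S**2) / np.sum(S**2)
    #SMALLEST NUMBER OF MODES CAPTURING AT LEAST (1 - epsilon) OF THE ENERGY
    nb_modes = int(np.searchsorted(energy, 1.0 - epsilon)) + 1
    #REDUCED BASIS : ONE MODE PER COLUMN
    return U[:, :nb_modes]
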
def Parametrization_analysis(path_parametrization):

    PARA = gf.Read_parametrization(path_parametrization)

    #WE ALWAYS BEGIN PARAMETRIZATION WITH HIGHEST Z COORDINATES
    PZ1 = PARA[0, 2]
    PZ2 = PARA[-1, 2]
    if PZ2 > PZ1:
        PARA = np.flip(PARA, axis=0)

    #COMPUTE THETAS
    print("-----> Number of thetas : ", np.shape(PARA[:, 12:])[1])

    #COMPUTE NB POINT CENTERLINE
    print("-----> Number of centerline's points : ", np.shape(PARA)[0])

    #COMPUTE AVERAGE_RADIUS
    average_radius = np.mean(PARA[:, 12:])
    print("-----> The average radius is {} mm. ".format(
        round(average_radius, 2)))

    #COMPUTE ARC LENGTH
    arc_length = stats.arc_length(1.0, PARA[:, 0:3])
    print("-----> The arc_length is {} mm. ".format(round(arc_length, 2)))

    return PARA, average_radius, arc_length
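

#stats.arc_length above measures the length of the discretised centerline.
#A minimal numpy sketch of a polyline arc length (illustration only, not the
#project's Statistics_Utilities implementation):
def Arc_length_sketch(POINTS):
    import numpy as np
    #SUM THE EUCLIDEAN LENGTHS OF ALL CONSECUTIVE SEGMENTS OF THE POLYLINE
    segments = np.diff(POINTS, axis=0)
    return np.sum(np.linalg.norm(segments, axis=1))
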
Example #3
def Wall_coeff_evolution_pod(thetas,
                             liste_r_theta,
                             epsilon_wall=1.e-2,
                             read_file=True):

    print("----> Creation Matrix of coefficients")
    if not read_file:
        #SET FOURIER SERIES ORDER
        f_order = 5
        #COMPUTE MATRICES WALL SHAPE AND RADIUS
        MAT, RAYON = Matrice_coefficients_wall(thetas,
                                               liste_r_theta,
                                               fourier_order=f_order)
        np.savetxt("files/MATRIX_WALL.csv", MAT, delimiter=', ')
        np.savetxt("files/MATRIX_RADIUS.csv", RAYON, delimiter=', ')

    else:
        print("------> Reading existing file")
        MAT = gf.Read_parametrization("files/MATRIX_WALL.csv")

    liste_mat_coeff = Liste_coeff_fourier(MAT)

    liste_pod = []
    liste_pod_coeff = []

    for i in range(len(liste_mat_coeff)):
        print("----> Creation of reduced basis for Fourier coeff {}".format(i))
        print("------> Epsilon Wall : ", epsilon_wall)
        COEFF = liste_mat_coeff[i]
        PHI_COEFF = Proper_orthogonal_decomposition(COEFF,
                                                    epsilon_wall,
                                                    nb_mods=False,
                                                    plot=False)
        liste_pod.append(PHI_COEFF)

        print(
            "----> Compute solution's coefficients of reduced basis {}".format(
                i))
        COEFF_POD = Coefficients_base_pod(COEFF, PHI_COEFF)
        liste_pod_coeff.append(COEFF_POD)

    return liste_pod, liste_pod_coeff
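

#The matrices handled above store, for each centerline section, the
#coefficients of a truncated Fourier series fitted to the wall function
#r(theta). A minimal least-squares sketch of such a fit (illustration only,
#not the project's Matrice_coefficients_wall):
def Fourier_fit_sketch(thetas, r_theta, order=5):
    import numpy as np
    #DESIGN MATRIX [1, cos(k*theta), sin(k*theta)] FOR k = 1..order
    columns = [np.ones_like(thetas)]
    for k in range(1, order + 1):
        columns.append(np.cos(k * thetas))
        columns.append(np.sin(k * thetas))
    A = np.vstack(columns).T
    #LEAST-SQUARES FIT OF THE COEFFICIENTS a0, a1, b1, ..., a_order, b_order
    coeffs, _, _, _ = np.linalg.lstsq(A, r_theta, rcond=None)
    return coeffs
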
Example #4
import scipy.stats as st
from scipy.spatial import procrustes
from scipy import interpolate
from sklearn import mixture
from symfit import parameters, variables, sin, cos, Fit
import matplotlib.pyplot as plt
import matplotlib.colors as col

#UTILITIES FILES
from tools import Files_Management as gf
from tools import BSplines_Utilities as bs
from tools import Statistics_Utilities as stats
from geometry_utilities import Geometry_Treatment as geo_treat

#RELOAD ALL NEEDED PACKAGES
gf.Reload(gf)
gf.Reload(bs)
gf.Reload(stats)
gf.Reload(geo_treat)

################################################################################################################
############################################### POD ON REDUCED BASIS ###########################################
################################################################################################################


def Centerline_pod(liste_control, epsilon=1.e-2):

    print("----> Procrustes Analysis + Save dilatation coefficient")
    #EXTRACT CONTROL POINTS FROM PROCRUSTES ANALYSIS
    procrust_control, disparity = Procrustes(liste_control)
    #EXTRACT DILATATION (SCALING) FROM PROCRUSTES ANALYSIS
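    #What follows is an assumed sketch of the remaining steps, not the original
    #body of Centerline_pod: it mirrors the analogous calls made elsewhere in
    #this module (Dilatation, Construction_bsplines,
    #Matrice_coefficients_centerline) and the POD pattern of Wall_radius_pod.
    dilatation = Dilatation(liste_control)

    #RECONSTRUCT CENTERLINES FROM THE PROCRUSTES CONTROL POINTS AND BUILD THE
    #MATRIX OF POLYNOMIAL COEFFICIENTS ON WHICH THE POD IS PERFORMED
    procrust_bspline = Construction_bsplines(procrust_control, 200, 5)
    MAT_CENTERLINE = Matrice_coefficients_centerline(procrust_bspline,
                                                     degree_approx=5)

    print("----> Creation of centerline reduced basis")
    print("------> Epsilon Centerline : ", epsilon)
    PHI_CENTERLINE = Proper_orthogonal_decomposition(MAT_CENTERLINE,
                                                     epsilon,
                                                     nb_mods=False,
                                                     plot=False)

    print("----> Compute solution's coefficients of the reduced basis")
    COEFFS_CENTERLINE = Coefficients_base_pod(MAT_CENTERLINE, PHI_CENTERLINE)

    return dilatation, PHI_CENTERLINE, COEFFS_CENTERLINE
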
import os
import sys

import trimesh
import meshio

from vmtk import pypes
from vmtk import vmtkscripts

import vtk
from vtk.util.numpy_support import vtk_to_numpy
from vtk.util.numpy_support import numpy_to_vtk

from scipy.spatial import distance

from tools import Files_Management as gf
from tools import BSplines_Utilities as bs

gf.Reload(gf)
gf.Reload(bs)

############################################################################################
################################### NIFTI READER ###########################################
############################################################################################


def Mesh_from_Nifti(nifti_file_to_read, path_writer, only_lumen=True):

    if not os.path.exists(nifti_file_to_read):
        print(f"File: {nifti_file_to_read} does not exist")
        sys.exit(1)

    #NIFTI READING AND CONVERSION TO NUMPY ARRAY
    reader = vtk.vtkNIFTIImageReader()
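    #Assumed sketch of this reading step (illustration only, not the original
    #implementation): load the NIfTI volume with the reader created above and
    #convert the voxel data to a numpy array with vtk_to_numpy.
    reader.SetFileName(nifti_file_to_read)
    reader.Update()
    image = reader.GetOutput()
    dims = image.GetDimensions()
    #VTK STORES POINT DATA WITH X VARYING FASTEST, HENCE THE REVERSED RESHAPE
    voxels = vtk_to_numpy(image.GetPointData().GetScalars())
    voxels = voxels.reshape(dims[2], dims[1], dims[0])

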
def Geo_reduction_wall_radius(path_patients_data,
                              nb_generation=20,
                              rot_sec=False,
                              mesh_extractor=False):

    print("---------------------------------- START PROGRAM ----------------------------------")

    #############################################################################
    ################ TRAINING DATA READING AND EXTRACTION #######################
    #############################################################################

    # EXTRACT DATA FROM FOLDER "path_patients_data". READ ALL PARAMETRIZATION FILES.
    print("--> DATA EXTRACTION FROM PARAMETRIZATION FILES")
    folder = sorted([f for f in os.listdir(path_patients_data) if not f.startswith('.')], key = str.lower)
    print("----> Number of patients considered : {}".format(len(folder)))

    liste_control = []
    liste_r_theta = []
    liste_originale_centerline = []

    thetas = np.linspace(0, 2*np.pi, 160)

    #WE LOOP ON EACH PARAMETRIZATION FILE AND EXTRACT RELEVANT INFORMATION
    for i in folder:
        #CHANGE path IF YOU GAVE ANOTHER NAME TO PARAMETRIZATION FILE
        path = path_patients_data + '/' + i + '/' + i + '_parametrization.csv'
        PARA = gf.Read_parametrization(path)

        #EXTRACTION CENTERLINE
        CENTERLINE = PARA[:,0:3]
        liste_originale_centerline.append(CENTERLINE)

        #EXTRACTION CONTROL POINTS TO USE PROCRUSTES ANALYSES AND RECONSTRUCTION B-SPLINES
        extract = np.linspace(0, np.shape(CENTERLINE)[0]-1, 10, dtype = 'int')
        CONTROL = CENTERLINE[extract,:]
        liste_control.append(CONTROL)

        #EXTRACTION OF FUNCTION R_THETA : DISTANCE TO ANGLE
        r_theta = PARA[:,12:]
        liste_r_theta.append(r_theta)

    #UNCOMMENT NEXT LINE IF YOU WANT TO STORE ORIGINAL CENTERLINE IN A FILE
    #gf.Write_csv("ORIGINAL_CENTERLINES.csv", np.vstack(liste_originale_centerline), "x, y, z")
    #############################################################################
    #############################################################################
    #############################################################################

    #############################################################################
    ################ PARAMETERS FOR GEOMETRIC MODEL REDUCTION ###################
    #############################################################################

    #EPSILON TO AUTOMATICALLY EXTRACT NUMBER OF POD MODES
    epsilon_c = 1.e-3
    epsilon_w = 1.e-3
    epsilon_r = 1.e-3
    #PARAMETERS TO RECONSTRUCT ANEURYSMS
    nb_sections = 500
    nb_thetas = 160

    #############################################################################
    #############################################################################
    #############################################################################

    print("")
    print("--> EXTRACTION CENTERLINE REDUCED BASIS")

    dilatation, PHI_CENTERLINE, COEFFS_CENTERLINE = gmr.Centerline_pod(liste_control, epsilon = epsilon_c)

    print("")
    print("--> EXTRACTION WALL AND RADIUS REDUCED BASIS")

    PHI_WALL, PHI_RAYON, COEFFS_WALL, COEFFS_RAYON = gmr.Wall_radius_pod(thetas, liste_r_theta, epsilon_wall = epsilon_w, epsilon_radius = epsilon_r, read_file = True)

    print("")
    print("--> GENERATION OF RANDOM AORTIC ANEURYSMS GEOMETRIES")

    if rot_sec:

        path_to_save = "results/generation_wall_radius_with_rotation"
        if os.path.exists(path_to_save):
            shutil.rmtree(path_to_save)
        os.makedirs(path_to_save)

        for i in range(nb_generation):

            print("----> GENERATED ANEURYSM NUMBER : ", i)

            #GIVE A NAME FOR THE ANEURYSM TO SAVE IT
            name = "ANEURYSM_WALL_RADIUS_" + str(i)
            #GIVE NAME OF FOLDER IN WHICH TO STORE ANEURYSMS

            ANEVRISME, ANEVRISME_ROT = gmr.Generator_wall_radius(PHI_CENTERLINE, PHI_WALL, PHI_RAYON, COEFFS_CENTERLINE, COEFFS_WALL, COEFFS_RAYON, dilatation, nb_sections, nb_thetas, rotation_section = True)
            gf.Write_csv(path_to_save + "/reconstruction_" + name + ".csv", ANEVRISME, "x, y, z")
            gf.Write_csv(path_to_save + "/reconstruction_rotation_" + name + ".csv", ANEVRISME_ROT, "x, y, z")

            if mesh_extractor:
                CONTOUR = ANEVRISME[nb_sections:,:]
                geo_treat.Mesh_generation(CONTOUR, path_to_save + "/mesh_contour_" + name + ".stl", nb_sections, nb_thetas)
                geo_treat.Read_and_Smooth(path_to_save + "/mesh_contour_" + name + ".stl", path_to_save + "/mesh_smooth_" + name + ".stl", coeff_smooth = 0.001)

                CONTOUR2 = ANEVRISME_ROT[nb_sections:,:]
                geo_treat.Mesh_generation(CONTOUR2, path_to_save + "/mesh_contour_rotation_" + name + ".stl", nb_sections, nb_thetas)
                geo_treat.Read_and_Smooth(path_to_save + "/mesh_contour_rotation_" + name + ".stl", path_to_save + "/mesh_smooth_rotation_" + name + ".stl", coeff_smooth = 0.001)

                #UNCOMMENT NEXT LINE IF YOU WANT TO REMESH FILES (WARNING : CAN BE VERY LONG TO COMPUTE!)
                #geo_treat.Surface_Remesh(path_to_save + "/mesh_smooth_" + name + ".stl", path_to_save + "/mesh_remesh_" + name + ".stl", target_edge_length = 0.3, nb_iterations = 10)

    else:

        path_to_save = "results/generation_wall_radius_no_rotation"
        if os.path.exists(path_to_save):
            shutil.rmtree(path_to_save)
        os.makedirs(path_to_save)

        for i in range(nb_generation):

            print("----> GENERATED ANEURYSM NUMBER : ", i)

            #GIVE A NAME FOR THE ANEURYSM TO SAVE IT
            name = "ANEURYSM_WALL_RADIUS_" + str(i)
            #GIVE NAME OF FOLDER IN WHICH TO STORE ANEURYSMS

            ANEVRISME = gmr.Generator_wall_radius(PHI_CENTERLINE, PHI_WALL, PHI_RAYON, COEFFS_CENTERLINE, COEFFS_WALL, COEFFS_RAYON, dilatation, nb_sections, nb_thetas, rotation_section = False)
            gf.Write_csv(path_to_save + "/reconstruction_" + name + ".csv", ANEVRISME, "x, y, z")

            if mesh_extractor:
                CONTOUR = ANEVRISME[nb_sections:,:]
                geo_treat.Mesh_generation(CONTOUR, path_to_save + "/mesh_contour_" + name + ".stl", nb_sections, nb_thetas)
                geo_treat.Read_and_Smooth(path_to_save + "/mesh_contour_" + name + ".stl", path_to_save + "/mesh_smooth_" + name + ".stl", coeff_smooth = 0.001)

                #UNCOMMENT NEXT LINE IF YOU WANT TO REMESH FILES (WARNING : CAN BE VERY LONG TO COMPUTE!)
                #geo_treat.Surface_Remesh(path_to_save + "/mesh_smooth_" + name + ".stl", path_to_save + "/mesh_remesh_" + name + ".stl", target_edge_length = 0.3, nb_iterations = 10)

    return 0
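

#A minimal, hypothetical driver for Geo_reduction_wall_radius. The folder name
#"patients_data" is only an example: it must contain one sub-folder per patient
#with a "<patient>_parametrization.csv" file inside, as read in the loop above.
if __name__ == "__main__":
    Geo_reduction_wall_radius("patients_data",
                              nb_generation=5,
                              rot_sec=True,
                              mesh_extractor=False)
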
Example #7
def Main_pre_processing(path_nifti_all, add_extension=False):

    print(
        "---------------------------------- START FILE TREATMENT ----------------------------------"
    )

    #READ THE FOLDER
    folder = sorted(
        [f for f in os.listdir(path_nifti_all) if not f.startswith('.')],
        key=str.lower)
    print("Number of nifti files in folder : {} \n".format(len(folder)))

    #WRITE HERE FOLDER NAME WHERE YOU WANT TO STORE YOUR DATA ! (MINE IS CALLED patients_data)
    path_datas = "patients_data_large/"

    #READ EACH FILE IN FOLDER
    for file in folder:

        print(
            "------------------------------ Treatment of patient {} ------------------------------ "
            .format(file))

        start_time = time.time()

        name_file = file[:4]

        print(
            " -> Creation of folder named {} and initialisation of all paths".
            format(name_file))

        #CREATION FOLDER
        dir = path_datas + name_file
        if os.path.exists(dir):
            shutil.rmtree(dir)
        os.makedirs(dir)

        #ALL PATH TO BE USED
        path_nifti_file = path_nifti_all + '/' + file
        path_marching_cube = dir + '/' + name_file + '_marching_cube.stl'
        path_mesh_closed = dir + '/' + name_file + '_mesh_closed.stl'
        path_centerline_vtp = dir + '/' + name_file + '_centerline.vtp'
        path_centerline_csv = dir + "/" + name_file + "_centerline_bspline.csv"
        path_control_csv = dir + "/" + name_file + "_centerline_control.csv"
        path_parametrization = dir + '/' + name_file + '_parametrization.csv'
        path_reconstruction = dir + '/' + name_file + '_reconstruction.csv'
        path_mesh_opened = dir + '/' + name_file + '_mesh_opened.stl'
        path_mesh_opened_remesh = dir + '/' + name_file + '_mesh_opened_remesh.stl'
        path_extension = dir + '/' + name_file + '_mesh_extension.stl'

        ########################################################################
        ######################## PARAMETERS USED ###############################
        ########################################################################

        #SMOOTHING
        coefficient_smoothing = 0.001
        iterations_smoothing = 50

        #CENTERLINE EXTRACTION
        coefficient_centerline = 0.1
        iterations_centerline = 50

        #CONVERT CENTERLINE TO BSPLINES
        nb_control_points = 10
        nb_centerline_points = 200
        bspline_degree = 3

        #WALL PARAMETRIZATION
        degree_centerline = 3
        nb_centerline_points_parametrization = 3000
        nb_thetas = 200
        fourier_order = 5

        #REMESH THE OPENED GEOMETRY
        edge_length = 0.5
        iterations_remesh = 5

        ########################################################################
        ########################################################################
        ########################################################################

        print("")
        print(
            " -> Read and Convert Nifti file {} to STL format using the Marching-Cubes algorithm"
            .format(file))
        geo_treat.Mesh_from_Nifti(path_nifti_file,
                                  path_marching_cube,
                                  only_lumen=True)
        print("")

        print(
            " -> Read Marching-Cube STL file, Smooth it and Convert the result to STL format"
        )
        geo_treat.Read_and_Smooth(path_marching_cube,
                                  path_mesh_closed,
                                  coeff_smooth=coefficient_smoothing,
                                  nb_iterations=iterations_smoothing)
        print("")

        print(
            " -> Extraction of Centerline, Smooth it and Convert the result to VTP format"
        )
        geo_treat.Centerline_Extraction(path_mesh_closed,
                                        path_centerline_vtp,
                                        coeff_smooth=coefficient_centerline,
                                        nb_iterations=iterations_centerline)
        print("")

        print(
            " -> Conversion centerline .VTP to numpy format and save as .CSV file"
        )
        CONTROL, CENTERLINE = geo_treat.Centerline_BSpline(
            path_centerline_vtp,
            nb_control=nb_control_points,
            nb_points=nb_centerline_points,
            degree=bspline_degree)
        gf.Write_csv(path_control_csv, CONTROL, "x, y, z")
        gf.Write_csv(path_centerline_csv, CENTERLINE, "x, y, z")
        print("")

        print(" -> Parametrization of the mesh named " + name_file +
              "_mesh_closed.stl")
        PARAMETRIZATION, RECONSTRUCTION = para.Parametrization(
            CENTERLINE,
            path_mesh_closed,
            degree=degree_centerline,
            nb_centerline=nb_centerline_points_parametrization,
            nb_thetas=nb_thetas,
            nb_modes_fourier=fourier_order)
        np.savetxt(path_parametrization, PARAMETRIZATION, delimiter=", ")
        gf.Write_csv(path_reconstruction, RECONSTRUCTION, "x, y, z")
        print("")

        print(" -> Cut the geometry at its extremities to open it")
        geo_treat.Mesh_Slice(path_mesh_closed, PARAMETRIZATION,
                             path_mesh_opened)
        print("")

        #print(" -> Remesh of the open mesh geometry")
        #geo_treat.Surface_Remesh(path_mesh_opened, path_mesh_opened_remesh, target_edge_length = edge_length, nb_iterations = iterations_remesh)

        if add_extension:
            print(" -> Add extension at the boundaries of the open mesh")
            geo_treat.Add_extension(path_mesh_opened_remesh,
                                    path_extension,
                                    extension_ratio=10,
                                    target_edge_length=0.5,
                                    nb_iterations=5)

        end_time = time.time()
        print("Total time for current patient : ",
              round(end_time - start_time, 2))
        print("")
        print("\a")

    return 0
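

#A minimal, hypothetical driver for the pre-processing pipeline above. The
#folder name "nifti_all" is only an example: it should contain the raw Nifti
#segmentations; results are written to the path_datas folder set in the function.
if __name__ == "__main__":
    Main_pre_processing("nifti_all", add_extension=False)
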
def Test_rotation_invariant(path_parametrization, path_solution):

    #### INITIALISATION
    PARA, average_radius, arc_length = Parametrization_analysis(
        path_parametrization)
    SOLUTION = Extract_solution(path_solution)

    #### COMPUTE CYLINDER
    CYLINDER = Mesh_cylinder(int(arc_length), int(average_radius), 1000, 160,
                             10)

    #### ORIGINAL MAPPING
    MESH_ORIGINAL = Mesh_original(PARA, 160, 10)
    INTERPOL = Interpolated_solution(MESH_ORIGINAL, SOLUTION)
    SOL_MESH_ORIGINAL = np.hstack((MESH_ORIGINAL, INTERPOL))
    SOL_CYLINDER = np.hstack((CYLINDER, INTERPOL))
    gf.Write_csv("SOL_ORIGINAL.csv", SOL_MESH_ORIGINAL,
                 "x, y, z, pressure, Vx, Vy, Vz, magnitude")
    gf.Write_csv("SOL_CYLINDER.csv", SOL_CYLINDER,
                 "x, y, z, pressure, Vx, Vy, Vz, magnitude")

    ### ROTATION
    thetax = 2 * np.pi * np.random.random()
    print("X axis rotation of angle : ", thetax)
    thetay = 2 * np.pi * np.random.random()
    print("Y axis rotation of angle : ", thetay)
    thetaz = 2 * np.pi * np.random.random()
    print("Z axis rotation of angle : ", thetaz)
    ROTX = np.array([[1, 0, 0], [0, np.cos(thetax), -np.sin(thetax)],
                     [0, np.sin(thetax), np.cos(thetax)]])
    ROTY = np.array([[np.cos(thetay), 0, np.sin(thetay)], [0, 1, 0],
                     [-np.sin(thetay), 0, np.cos(thetay)]])
    ROTZ = np.array([[np.cos(thetaz), -np.sin(thetaz), 0],
                     [np.sin(thetaz), np.cos(thetaz), 0], [0, 0, 1]])

    ### ROTATION SOLUTION VTU
    POINTS = SOLUTION[:, 0:3]
    VALUES = SOLUTION[:, 3:]
    ROT_POINTS_X = np.dot(ROTX, POINTS.T)
    ROT_POINTS_Y = np.dot(ROTY, ROT_POINTS_X)
    ROT_POINTS_Z = np.dot(ROTZ, ROT_POINTS_Y)
    ROT_SOLUTION = np.hstack((ROT_POINTS_Z.T, VALUES))

    ### ROTATION PARAMETRIZATION
    P = PARA[:, 0:3]
    T = PARA[:, 3:6]
    N = PARA[:, 6:9]
    B = PARA[:, 9:12]
    RT = PARA[:, 12:]
    ROT_P_X = np.dot(ROTX, P.T)
    ROT_T_X = np.dot(ROTX, T.T)
    ROT_N_X = np.dot(ROTX, N.T)
    ROT_B_X = np.dot(ROTX, B.T)
    ROT_P_Y = np.dot(ROTY, ROT_P_X)
    ROT_T_Y = np.dot(ROTY, ROT_T_X)
    ROT_N_Y = np.dot(ROTY, ROT_N_X)
    ROT_B_Y = np.dot(ROTY, ROT_B_X)
    ROT_P_Z = np.dot(ROTZ, ROT_P_Y)
    ROT_T_Z = np.dot(ROTZ, ROT_T_Y)
    ROT_N_Z = np.dot(ROTZ, ROT_N_Y)
    ROT_B_Z = np.dot(ROTZ, ROT_B_Y)

    ROT_PARA = np.hstack((ROT_P_Z.T, ROT_T_Z.T, ROT_N_Z.T, ROT_B_Z.T, RT))

    #### MAPPING GEO ROTATION
    MESH_ROT = Mesh_original(ROT_PARA, 160, 10)
    INTERPOL_ROT = Interpolated_solution(MESH_ROT, ROT_SOLUTION)
    SOL_MESH_ROT = np.hstack((MESH_ROT, INTERPOL_ROT))
    SOL_CYLINDER_ROT = np.hstack((CYLINDER, INTERPOL_ROT))
    gf.Write_csv("SOL_ORIGINAL_ROT.csv", SOL_MESH_ROT,
                 "x, y, z, pressure, Vx, Vy, Vz, magnitude")
    gf.Write_csv("SOL_CYLINDER_ROT.csv", SOL_CYLINDER_ROT,
                 "x, y, z, pressure, Vx, Vy, Vz, magnitude")

    #### COMPUTE ERROR
    error = np.linalg.norm(SOL_CYLINDER[:, -1] - SOL_CYLINDER_ROT[:, -1])
    print("The error is ", error)

    return 0
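

#The successive np.dot calls above apply ROTX, then ROTY, then ROTZ. Since
#rotations compose by matrix multiplication, the same mapping can be written
#with a single matrix R = ROTZ @ ROTY @ ROTX; a small self-contained sketch of
#that equivalence (illustration only):
def Combined_rotation_sketch(POINTS, thetax, thetay, thetaz):
    import numpy as np
    ROTX = np.array([[1, 0, 0], [0, np.cos(thetax), -np.sin(thetax)],
                     [0, np.sin(thetax), np.cos(thetax)]])
    ROTY = np.array([[np.cos(thetay), 0, np.sin(thetay)], [0, 1, 0],
                     [-np.sin(thetay), 0, np.cos(thetay)]])
    ROTZ = np.array([[np.cos(thetaz), -np.sin(thetaz), 0],
                     [np.sin(thetaz), np.cos(thetaz), 0], [0, 0, 1]])
    #ONE EQUIVALENT MATRIX, SAME ORDER AS ABOVE : FIRST X, THEN Y, THEN Z
    R = ROTZ @ ROTY @ ROTX
    return (R @ POINTS.T).T
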
import numpy as np
import meshio
import time
import trimesh
import os
import shutil

import matplotlib.pyplot as plt
from sklearn.decomposition import PCA

from math import *
from scipy.interpolate import griddata
from scipy.spatial import distance

from tools import Files_Management as gf
from tools import Statistics_Utilities as stats

gf.Reload(gf)
gf.Reload(stats)


def Main_generative_algorithm(path_patients_data):

    print("Start Generation Algorithm \n")

    start_time = time.time()

    #READ THE FOLDER
    folder = sorted(
        [f for f in os.listdir(path_patients_data) if not f.startswith('.')],
        key=str.lower)
    print("Number of patients data files in folder : {} \n".format(
        len(folder)))

    # Name of folders where the generative surfaces are going to be stored
    path_datas = "results/gen_surfaces/training_set"
    path_datas_random = "results/gen_surfaces/predict_set"

    print(" -> Creation of folders {} and {} to store all generative surfaces".
          format(path_datas, path_datas_random))
    dir1 = path_datas
    if os.path.exists(dir1):
        shutil.rmtree(dir1)
    os.makedirs(dir1)
    dir2 = path_datas_random
    if os.path.exists(dir2):
        shutil.rmtree(dir2)
    os.makedirs(dir2)

    # creating surfaces training set
    training_set = []
    liste_control = []

    #READ EACH FILE IN FOLDER
    for file in folder:

        print("Creating Generative Surface from patient {} surface ".format(
            file))

        # name of the .csv file that contains the radii
        surf_file_name = path_patients_data + '/' + file + '/' + file + '_parametrization.csv'

        # loading .csv file (surf_data is of type 'float64')
        #print("surf_file_name: ", surf_file_name, "\n")
        PARA = gf.Read_parametrization(surf_file_name)

        CENTERLINE = PARA[:, 0:3]
        extract = np.linspace(0, np.shape(CENTERLINE)[0] - 1, 10, dtype='int')
        CONTROL = CENTERLINE[extract, :]
        liste_control.append(CONTROL)

        RADIUS = PARA[:, 12:]
        n_point = np.shape(RADIUS)[0]
        n_radius = np.shape(RADIUS)[1]

        # preparing mesh for plotting
        nb_centerline_points = np.linspace(0,
                                           n_point,
                                           n_point,
                                           endpoint=True,
                                           dtype=int)
        nb_thetas = np.linspace(0,
                                n_radius,
                                n_radius,
                                endpoint=True,
                                dtype=int)
        X, Y = np.meshgrid(nb_centerline_points, nb_thetas)
        Z = RADIUS

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.plot_surface(X, Y, Z.T,
                        cmap='ocean')  # Remember:  rows, cols = Z.shape
        #plt.show()

        # surface plot file name:
        surf_plot_name = path_datas + '/' + file + '_surface.png'
        plt.savefig(surf_plot_name)
        # view from above (X-Y plane)
        #ax.view_init(90, 90)
        # surface plot seen from above file name:
        #surf_plot_name = surf_folder_name + '/' + name_file + '_surface_XY.png'
        #plt.savefig(surf_plot_name)

        # add surface file to list for generation algorithm
        training_set.append(RADIUS.ravel())

    ######## CENTERLINE ##############
    print(
        "Procrustes Analysis + Save dilatation coefficient + Creation Matrix of coefficients"
    )
    #EXTRACT CONTROL POINTS FROM PROCRUSTES ANALYSIS
    procrust_control, disparity = gmr.Procrustes(liste_control)
    #EXTRACT DILATATION (SCALING) FROM PROCRUSTES ANALYSIS
    dilatation = gmr.Dilatation(liste_control)
    DILATATION = np.asarray(dilatation).reshape(-1, 1)
    print("Size of dilatation training set : ", np.shape(DILATATION))
    #RECONSTRUCT NEW CENTERLINES FROM PROCRUSTES CONTROL POINTS
    procrust_bspline = gmr.Construction_bsplines(procrust_control, 200, 5)
    for i in range(len(procrust_bspline)):
        SPLINE = procrust_bspline[i]
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.scatter(SPLINE[:, 0], SPLINE[:, 1],
                   SPLINE[:, 2])  # Remember:  rows, cols = Z.shape
        spline_plot_name = path_datas + '/' + str(i) + '_spline.png'
        plt.savefig(spline_plot_name)

    #DEGREE OF POLYNOMIAL APPROXIMATION
    degree = 5
    #CREATE MATRIX ON WHICH TO RUN POD OF POLYNOMIAL COEFFICIENTS
    print("Degree polynomial approximation : ", degree)
    TRAIN_COEFF = gmr.Matrice_coefficients_centerline(procrust_bspline,
                                                      degree_approx=degree)
    print("Size of coefficient training set ", np.shape(TRAIN_COEFF))

    ######### RADIUS ##################
    TRAIN_RADIUS = np.vstack(training_set)
    print("Size of the radius training set : ", np.shape(TRAIN_RADIUS))

    TRAIN = np.hstack((DILATATION, TRAIN_COEFF.T, TRAIN_RADIUS))
    print("Size of the full training set : ", np.shape(TRAIN))

    ##### DIMENSIONALITY REDUCTION ##########
    pca = PCA(0.99999, whiten=True, svd_solver='full')
    REDUCED = pca.fit_transform(TRAIN)
    print('PCA : shape of reduced dataset: ', np.shape(REDUCED))

    ##### PERFORM AIC TO SEARCH BEST NUMBER OF COMPONENTS ################
    min_n_components = 1
    max_n_components = np.shape(REDUCED)[0]
    n_components = np.arange(min_n_components, max_n_components, 3)
    models = [
        GMM(n, covariance_type='full', random_state=0) for n in n_components
    ]
    aics = [model.fit(REDUCED).aic(REDUCED) for model in models]

    fig = plt.figure()
    plt.plot(n_components, aics)
    #plt.show()
    plt.savefig(path_datas + '/' +
                'AIC_graph.png')  # can hide DeprecationWarning

    mini = np.argmin(aics)
    best_nb_components = n_components[mini]
    print("Best number of components is : ", best_nb_components)

    ##### PERFORM GMM WITH BEST NUMBER COMPONENTS #######################
    gmm = GMM(best_nb_components, covariance_type='full', random_state=0)
    gmm.fit(REDUCED)
    print('Convergence of GMM model fit to reduced dataset: ',
          gmm.converged_)

    # n_sample: sample of new surfaces
    n_sample = 30
    DATA_NEW = gmm.sample(n_sample, random_state=0)
    print('Shape of random data : ', np.shape(DATA_NEW))

    # inverse transform of the PCA object to construct the new surfaces
    NEW = pca.inverse_transform(DATA_NEW)
    print('Shape of random data after inverse PCA : ', np.shape(NEW))

    thetas = np.linspace(0, 2 * np.pi, n_radius)
    t_anevrisme = np.linspace(0, 1, 1000)

    for i in range(n_sample):

        print("Saving sample {} and create aneurysm".format(i))

        SAMPLE = NEW[i, :]
        DILATATION_SAMPLE = SAMPLE[0]
        CENTERLINE_SAMPLE = SAMPLE[1:3 * (degree + 1) + 1]
        RADIUS_SAMPLE = SAMPLE[3 * (degree + 1) + 1:]

        ### CENTERLINE ##############
        step = int(len(CENTERLINE_SAMPLE) / 3)
        coeffs_x = CENTERLINE_SAMPLE[0:step]
        coeffs_y = CENTERLINE_SAMPLE[step:2 * step]
        coeffs_z = CENTERLINE_SAMPLE[2 * step:]
        px = np.poly1d(coeffs_x)
        py = np.poly1d(coeffs_y)
        pz = np.poly1d(coeffs_z)
        der1x = np.polyder(px, m=1)
        der1y = np.polyder(py, m=1)
        der1z = np.polyder(pz, m=1)
        der2x = np.polyder(px, m=2)
        der2y = np.polyder(py, m=2)
        der2z = np.polyder(pz, m=2)

        COORD = np.zeros((len(t_anevrisme), 3))
        COORD[:, 0] = px(t_anevrisme)
        COORD[:, 1] = py(t_anevrisme)
        COORD[:, 2] = pz(t_anevrisme)
        COORD *= DILATATION_SAMPLE
        print("------> Arc Length of the centerline : ",
              stats.arc_length(1, COORD))
        BSPLINE, TAN, NOR, BI = stats.frenet_frame(COORD)

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.scatter(BSPLINE[:, 0], BSPLINE[:, 1],
                   BSPLINE[:, 2])  # Remember:  rows, cols = Z.shape
        bspline_plot_name = path_datas_random + '/random_bspline_' + str(
            i) + '.png'
        plt.savefig(bspline_plot_name)

        ### RADIUS ##################
        RADIUS_SAMPLE = RADIUS_SAMPLE.reshape(n_point, n_radius)
        #np.savetxt(path_datas + '/random_' + str(i) + ".csv", SAMPLE, delimiter = ',')

        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.plot_surface(X, Y, RADIUS_SAMPLE.T,
                        cmap='ocean')  # Remember:  rows, cols = Z.shape
        surf_plot_name = path_datas_random + '/random_radius_' + str(
            i) + '.png'
        plt.savefig(surf_plot_name)

        liste_contour = []

        for k in range(len(COORD)):

            R_THETA = RADIUS_SAMPLE[k, :]
            TAB = np.hstack((thetas[np.newaxis].T, R_THETA[np.newaxis].T))

            #COORD
            C = BSPLINE[k, :]
            T = TAN[k, :]
            N = NOR[k, :]
            B = BI[k, :]

            #RECONSTRUCTION OF SHAPE OF THE SECTION
            PASSAGE = gmr.Matrice_de_passage(T, N, B)
            COORD_PLAN = ((np.dot(PASSAGE.T, C.T)).T)
            CONTOUR = gmr.Reconstruction_contour(COORD_PLAN, TAB, PASSAGE)
            liste_contour.append(CONTOUR)

        L = np.vstack(liste_contour)
        ANEVRISME = np.vstack((BSPLINE, L))
        gf.Write_csv(
            path_datas_random + '/' + "RANDOM_ANEURYSM_{}.csv".format(i),
            ANEVRISME, "x, y, z")

    end_time = time.time()
    print("Total time ", round(end_time - start_time, 2))
    print("")

    return 0
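

#The generation step above compresses the training set with PCA, fits a
#Gaussian mixture in the reduced space and samples it to create new geometries.
#A minimal, self-contained sketch of that pipeline with the current
#scikit-learn API (where GaussianMixture.sample returns the samples together
#with their component labels); illustration only, not the exact code above:
def Generative_pipeline_sketch(TRAIN, n_sample=30):
    import numpy as np
    from sklearn.decomposition import PCA
    from sklearn.mixture import GaussianMixture

    #PCA COMPRESSION OF THE TRAINING MATRIX (ONE GEOMETRY PER ROW)
    pca = PCA(0.99999, whiten=True, svd_solver='full')
    REDUCED = pca.fit_transform(TRAIN)

    #FIT A GAUSSIAN MIXTURE IN THE REDUCED SPACE AND DRAW NEW SAMPLES
    gmm = GaussianMixture(n_components=min(4, np.shape(REDUCED)[0]),
                          covariance_type='full',
                          random_state=0)
    gmm.fit(REDUCED)
    SAMPLES, _ = gmm.sample(n_sample)

    #BACK TO THE ORIGINAL SPACE : EACH ROW IS A NEW SYNTHETIC GEOMETRY
    return pca.inverse_transform(SAMPLES)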