Example #1
def SampleLocalExtraction(image_input, sample_points, emprise_local_sample, vector_sample_local_output, name_column, band_name, ram_otb=0, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False):

    # Create the local zone from the image footprint
    empr_xmin, empr_xmax, empr_ymin, empr_ymax = getEmpriseImage(image_input)
    createEmpriseShapeReduced(sample_points, empr_xmin, empr_ymin, empr_xmax, empr_ymax, emprise_local_sample, format_vector)

    # Extract sample
    command = "otbcli_SampleExtraction -in %s -vec %s -outfield prefix -outfield.prefix.name %s -out %s -field %s" %(image_input, emprise_local_sample, band_name, vector_sample_local_output, name_column)
    if ram_otb > 0:
        command += " -ram %d" %(ram_otb)
    print(command)
    exitCode = os.system(command)
    if exitCode != 0:
        raise NameError(cyan + "SampleLocalExtraction() : " + bold + red + "An error occured during otbcli_SampleExtraction command. See error message above." + endC)

    # Clean temp file
    if not save_results_intermediate :
        removeVectorFile(emprise_local_sample)

    return
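
# Usage sketch (added for illustration, not from the original source): extract the
# image band values under a set of sample points. All paths, the "label" column and
# the "b" band prefix below are hypothetical placeholders; the helper functions
# (getEmpriseImage, createEmpriseShapeReduced, removeVectorFile) are assumed to be
# provided by the surrounding library.
SampleLocalExtraction("/tmp/image.tif", "/tmp/points.shp",
                      "/tmp/emprise_local.shp", "/tmp/samples.shp",
                      name_column="label", band_name="b", ram_otb=2048)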
def processTDCfilesSmoothAndFusion(coastline_vectors_input_list, vector_rocky_input, vector_all_output, vector_withrocky_output, generalize_param_method, generalize_param_threshold, name_column_fusion, path_time_log, epsg=2154, format_vector='ESRI Shapefile', extension_vector='.shp', save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "processTDCfilesSmoothAndFusion() : Create final coastline starting : "
    timeLine(path_time_log,starting_event)

    print(endC)
    print(bold + green + "## START : POST TRAITEMENT TDC" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green + "processTDCfilesSmoothAndFusion() : Variables dans la fonction" + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "coastline_vectors_input_list : " + str(coastline_vectors_input_list) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "vector_rocky_input : " + str(vector_rocky_input) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "vector_all_output : " + str(vector_all_output) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "vector_withrocky_output : " + str(vector_withrocky_output) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "generalize_param_method : " + str(generalize_param_method) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "generalize_param_threshold : " + str(generalize_param_threshold) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "name_column_fusion : " + str(name_column_fusion) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "overwrite : " + str(overwrite) + endC)

    SUFFIX_SMOOTH = "_smooth"
    SUFFIX_TMP = "_tmp"
    SUFFIX_SMOOTH = "_smooth"
    SUFFIX_FUSION = "_fusion"

    repertory_output = os.path.dirname(vector_all_output)
    file_name = os.path.splitext(os.path.basename(vector_all_output))[0]
    vector_fusion = repertory_output + os.sep + file_name + SUFFIX_FUSION + extension_vector

    repertory_temp = repertory_output + os.sep + file_name + SUFFIX_TMP
    if not os.path.exists(repertory_temp):
        os.makedirs(repertory_temp)

    # Check whether the output vector already exists
    check = os.path.isfile(vector_all_output)

    # If it does and overwrite is disabled, skip to the next step
    if check and not overwrite :
        print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Vector general coastline already exists : " + str(vector_all_output) + "." + endC)
    # Otherwise, or if overwrite is enabled, apply the smoothing and fusion processing
    else:
        # Try to remove the output files
        try:
            removeVectorFile(vector_all_output)
            removeVectorFile(vector_withrocky_output)
        except Exception:
            # Ignore the exception raised when the file does not exist (and thus cannot be removed)
            pass

        # Apply the GRASS smoothing processing to every input vector file
        param_generalize_dico = {"method":generalize_param_method, "threshold":generalize_param_threshold}
        vectors_temp_output_list = []

        for input_vector in coastline_vectors_input_list :
            vector_name = os.path.splitext(os.path.basename(input_vector))[0]
            output_temp_vector = repertory_temp + os.sep + vector_name + SUFFIX_TMP + extension_vector
            output_smooth_vector = repertory_temp + os.sep + vector_name + SUFFIX_SMOOTH + extension_vector

            vectors_temp_output_list.append(output_temp_vector)
            xmin, xmax, ymin, ymax = getEmpriseFile(input_vector, format_vector)
            projection = getProjection(input_vector, format_vector)
            if projection is None:
                projection = epsg

            # Init GRASS
            if debug >= 3:
                print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green +  "Initialisation de GRASS " + endC)
            initializeGrass(repertory_temp, xmin, xmax, ymin, ymax, 1, 1, projection)

            # Generalize GRASS
            if debug >= 3:
                print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green +  "Applying smooth GRASS for vector : " + str(input_vector) + endC)
            smoothGeomGrass(input_vector, output_smooth_vector, param_generalize_dico, format_vector, overwrite)
            geometries2multigeometries(output_smooth_vector, output_temp_vector, name_column_fusion, format_vector)

            # Close GRASS
            if debug >= 3:
                print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green +  "Cloture de GRASS " + endC)
            cleanGrass(repertory_temp)

        if debug >= 3:
            print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green +  "Fusion de tous les vecteurs lissés : " + str(vectors_temp_output_list) + endC)

        # Merge all the temporary vector files
        fusionVectors(vectors_temp_output_list, vector_fusion, format_vector)

        # Suppression du champ "cat" introduit par l'application GRASS
        deleteFieldsVector(vector_fusion, vector_all_output, ["cat"], format_vector)

        # Update the id field with an increment
        updateIndexVector(vector_all_output, "id", format_vector)

        # Remove the rocky areas from the coastline
        if vector_rocky_input != "" and vector_withrocky_output != "":
            if debug >= 3:
                print("\n" + cyan + "processTDCfilesSmoothAndFusion() : " + bold + green +  "Creation d'un trait de côte generale sans les zones rocheuses : " + str(vector_withrocky_output) + endC)
            differenceVector(vector_rocky_input, vector_all_output, vector_withrocky_output, overwrite, format_vector)

    # Remove intermediate files
    if not save_results_intermediate :
        if debug >= 3:
            print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green +  "Suppression des fichiers temporaires " + endC)
        if os.path.exists(repertory_temp):
            shutil.rmtree(repertory_temp)
        removeVectorFile(vector_fusion)

    print(endC)
    print(bold + green + "## END :  POST TRAITEMENT TDC" + endC)
    print(endC)

    # Update the log
    ending_event = "processTDCfilesSmoothAndFusion() : Create final coastline ending : "
    timeLine(path_time_log,ending_event)

    return
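
# Usage sketch (illustrative, not from the original source): smooth and fuse two
# coastline vectors, then subtract the rocky areas. The GRASS v.generalize method
# name "chaiken" and the threshold value are plausible settings, not values taken
# from the source; every path is a hypothetical placeholder.
processTDCfilesSmoothAndFusion(["/tmp/tdc_2018.shp", "/tmp/tdc_2020.shp"],
                               "/tmp/rocky_areas.shp",
                               "/tmp/tdc_final.shp",
                               "/tmp/tdc_final_without_rocky.shp",
                               "chaiken", 1.0, "id", "/tmp/time.log")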
def imperviousSurfaceFraction(grid_input, grid_output, classif_input, class_imprevious_list, path_time_log, format_vector='ESRI Shapefile', extension_raster=".tif", save_results_intermediate=False, overwrite=True):

    print(bold + yellow + "Début du calcul de l'indicateur Impervious Surface Fraction." + endC + "\n")
    timeLine(path_time_log, "Début du calcul de l'indicateur Impervious Surface Fraction : ")

    if debug >= 3 :
        print(bold + green + "imperviousSurfaceFraction() : Variables dans la fonction" + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "grid_input : " + str(grid_input) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "grid_output : " + str(grid_output) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "classif_input : " + str(classif_input) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "class_imprevious_list : " + str(class_imprevious_list) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "imperviousSurfaceFraction() : " + endC + "overwrite : " + str(overwrite) + endC)

    if not os.path.exists(grid_output) or overwrite:

        ############################################
        ### General preparation of the processing ###
        ############################################

        if os.path.exists(grid_output):
            removeVectorFile(grid_output)

        temp_path = os.path.dirname(grid_output) + os.sep + "ImperviousSurfaceFraction"
        impermeability_raster = temp_path + os.sep + "impermeability" + extension_raster

        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)
        os.makedirs(temp_path)

        ##############################
        ### Indicator computation ###
        ##############################

        print(bold + cyan + "Création de la carte d'imperméabilité :" + endC + "\n")
        timeLine(path_time_log, "    Création de la carte d'imperméabilité : ")
        expression = ""
        for id_class in class_imprevious_list :
            expression += "im1b1==%s or " %(str(id_class))
        expression = expression[:-4]
        command = "otbcli_BandMath -il %s -out %s uint8 -exp '%s ? 1 : 99'" %(classif_input, impermeability_raster, expression)
        if debug >= 3 :
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(cyan + "imperviousSurfaceFraction() : " + bold + red + "!!! Une erreur c'est produite au cours de la commande otbcli_BandMath : " + command + ". Voir message d'erreur." + endC, file=sys.stderr)
            raise

        print(bold + cyan + "Récupération de Impervious Surface Fraction par maille :" + endC + "\n")
        timeLine(path_time_log, "    Récupération de Impervious Surface Fraction par maille : ")
        statisticsVectorRaster(impermeability_raster, grid_input, grid_output, 1, True, False, False, [], [], {99:'Perm', 1:'Imperm'}, path_time_log, True, format_vector, save_results_intermediate, overwrite)


        ##########################################
        ### Temporary files cleanup ###
        ##########################################

        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)

    else:
        print(bold + magenta + "Le calcul du Impervious Surface Fraction a déjà eu lieu." + endC + "\n")

    print(bold + yellow + "Fin du calcul de l'indicateur Impervious Surface Fraction." + endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Impervious Surface Fraction : ")

    return
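
# Illustration (added, not from the original source): how the BandMath expression
# above is assembled. For a hypothetical class_imprevious_list of [11000, 12000],
# the loop produces "im1b1==11000 or im1b1==12000 or " and the [:-4] slice strips
# the trailing " or ", giving the condition plugged into "<cond> ? 1 : 99".
classes = [11000, 12000]
expression = "".join("im1b1==%s or " % c for c in classes)[:-4]
assert expression == "im1b1==11000 or im1b1==12000"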
Example #4
def soilOccupationChange(input_plot_vector, output_plot_vector, footprint_vector, input_tx_files_list, evolutions_list=['0:1:11000:10:50:and', '0:1:12000:10:50:and', '0:1:21000:10:50:and', '0:1:22000:10:50:and', '0:1:23000:10:50:and'], class_label_dico={11000:'Bati', 12000:'Route', 21000:'SolNu', 22000:'Eau', 23000:'Vegetation'}, epsg=2154, no_data_value=0, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster='.tif', extension_vector='.shp', postgis_ip_host='localhost', postgis_num_port=5432, postgis_user_name='postgres', postgis_password='******', postgis_database_name='database', postgis_schema_name='public', postgis_encoding='latin1', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "Evolution de l'OCS par parcelle - Variables dans la fonction :" + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "input_plot_vector : " + str(input_plot_vector) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "output_plot_vector : " + str(output_plot_vector) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "footprint_vector : " + str(footprint_vector) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "input_tx_files_list : " + str(input_tx_files_list) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "evolutions_list : " + str(evolutions_list) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "postgis_password : "******"    soilOccupationChange() : " + endC + "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "    soilOccupationChange() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Constants
    EXTENSION_TEXT = '.txt'
    SUFFIX_TEMP = '_temp'
    SUFFIX_CUT = '_cut'
    AREA_FIELD = 'st_area'
    GEOM_FIELD = 'geom'

    # Update the log
    starting_event = "soilOccupationChange() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "soilOccupationChange() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # 'basename' variables
    output_plot_basename = os.path.splitext(os.path.basename(output_plot_vector))[0]

    # Temporary variables
    temp_directory = os.path.dirname(output_plot_vector) + os.sep + output_plot_basename + SUFFIX_TEMP
    plot_vector_cut = temp_directory + os.sep + output_plot_basename + SUFFIX_CUT + extension_vector

    # PostGIS variables
    plot_table = output_plot_basename.lower()

    # .txt file associated with the output vector, listing the quantified evolutions
    output_evolution_text_file = os.path.splitext(output_plot_vector)[0] + EXTENSION_TEXT

    # Clean up previous processing
    if debug >= 3:
        print(cyan + "soilOccupationChange() : " + endC + "Nettoyage des traitements précédents." + endC + '\n')
    removeVectorFile(output_plot_vector, format_vector=format_vector)
    removeFile(output_evolution_text_file)
    cleanTempData(temp_directory)
    dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    #############
    # Step 0/2 # Processing preparation
    #############

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 0/2 - Début de la préparation des traitements." + endC + '\n')

    # Clip the parcel layer to the study area
    cutVector(footprint_vector, input_plot_vector, plot_vector_cut, overwrite=overwrite, format_vector=format_vector)

    # Get the field names from the source file (to later isolate the newly created fields and rename them)
    attr_names_list_origin = getAttributeNameList(plot_vector_cut, format_vector=format_vector)
    new_attr_names_list_origin = list(attr_names_list_origin)  # Copy, so appending new names does not alter the original list

    # PostGIS preparation
    createDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 0/2 - Fin de la préparation des traitements." + endC + '\n')

    #############
    # Step 1/2 # Computing the statistics at tx
    #############

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 1/2 - Début des calculs des statistiques à tx." + endC + '\n')

    len_tx = len(input_tx_files_list)
    tx = 0

    # Loop over the t0+x input files
    for input_tx_file in input_tx_files_list:
        if debug >= 3:
            print(cyan + "soilOccupationChange() : " + endC + bold + "Calcul des statistiques à tx %s/%s." % (tx+1, len_tx) + endC + '\n')

        # OCS statistics per parcel
        statisticsVectorRaster(input_tx_file, plot_vector_cut, "", 1, True, False, False, [], [], class_label_dico, path_time_log, clean_small_polygons=True, format_vector=format_vector, save_results_intermediate=save_results_intermediate, overwrite=overwrite)

        # Get the field names from the parcel file (including the fields just created by the CVR step)
        attr_names_list_tx = getAttributeNameList(plot_vector_cut, format_vector=format_vector)

        # Isolate the new fields produced by the CVR step
        fields_name_list  = []
        for attr_name in attr_names_list_tx:
            if attr_name not in new_attr_names_list_origin:
                fields_name_list.append(attr_name)

        # Build new names for the CVR fields
        new_fields_name_list  = []
        for field_name in fields_name_list:
            new_field_name = 't%s_' % tx + field_name
            new_field_name = new_field_name[:10]  # Shapefile field names are limited to 10 characters
            new_fields_name_list.append(new_field_name)
            new_attr_names_list_origin.append(new_field_name)

        # Rename the CVR fields, so the CVR step can be re-run on other dates
        renameFieldsVector(plot_vector_cut, fields_name_list, new_fields_name_list, format_vector=format_vector)

        tx += 1

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 1/2 - Fin des calculs des statistiques à tx." + endC + '\n')

    #############
    # Step 2/2 # Characterising the changes
    #############

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 2/2 - Début de la caractérisation des changements." + endC + '\n')

    # Pre-processing in PostGIS
    plot_table = importVectorByOgr2ogr(postgis_database_name, plot_vector_cut, plot_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding)
    connection = openConnection(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    # SQL query computing the parcel areas
    sql_query = "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, AREA_FIELD)
    sql_query += "UPDATE %s SET %s = ST_Area(%s);\n" % (plot_table, AREA_FIELD, GEOM_FIELD)

    # Loop over the evolutions to quantify
    temp_field = 1
    for evolution in evolutions_list:
        evolution_split = evolution.split(':')
        idx_bef = int(evolution_split[0])
        idx_aft = int(evolution_split[1])
        label = int(evolution_split[2])
        evol = abs(int(evolution_split[3]))
        evol_s = abs(int(evolution_split[4]))
        combi = evolution_split[5]
        class_name = class_label_dico[label]
        def_evo_field = "def_evo_%s" % str(temp_field)
        if debug >= 3:
            print(cyan + "soilOccupationChange() : " + endC + bold + "Caractérisation des changements t%s/t%s pour la classe '%s' (%s)." % (idx_bef, idx_aft, class_name, label) + endC + '\n')

        if evol != 0 or evol_s != 0:

            # Evolution expressed as a rate
            evol_str = str(evol) + ' %'
            evo_field = "evo_%s" % str(temp_field)
            t0_field = 't%s_' % idx_bef + class_name.lower()[:7]
            t1_field = 't%s_' % idx_aft + class_name.lower()[:7]

            # Evolution expressed as a surface
            evol_s_str = str(evol_s) + ' m²'
            evo_s_field = "evo_s_%s" % str(temp_field)
            t0_s_field = 't%s_s_' % idx_bef + class_name.lower()[:5]
            t1_s_field = 't%s_s_' % idx_aft + class_name.lower()[:5]

            # SQL queries for the raw evolution computation
            sql_query += "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, evo_field)
            sql_query += "UPDATE %s SET %s = %s - %s;\n" % (plot_table, evo_field, t1_field, t0_field)
            sql_query += "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, evo_s_field)
            sql_query += "UPDATE %s SET %s = %s - %s;\n" % (plot_table, evo_s_field, t1_s_field, t0_s_field)
            sql_query += "ALTER TABLE %s ADD COLUMN %s VARCHAR;\n" % (plot_table, def_evo_field)
            sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - aucune evolution';\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name)

            # Evolution via both rate and surface
            if evol != 0 and evol_s != 0:
                text_evol = "taux à %s" % evol_str
                if combi == 'and':
                    text_evol += " ET "
                elif combi == 'or':
                    text_evol += " OU "
                text_evol += "surface à %s" % evol_s_str
                sql_where_pos = "%s >= %s %s %s >= %s" % (evo_field, evol, combi, evo_s_field, evol_s)
                sql_where_neg = "%s <= -%s %s %s <= -%s" % (evo_field, evol, combi, evo_s_field, evol_s)

            # Evolution via rate only
            elif evol != 0:
                text_evol = "taux à %s" % evol_str
                sql_where_pos = "%s >= %s" % (evo_field, evol)
                sql_where_neg = "%s <= -%s" % (evo_field, evol)

            # Evolution via surface only
            elif evol_s != 0:
                text_evol = "surface à %s" % evol_s_str
                sql_where_pos = "%s >= %s" % (evo_s_field, evol_s)
                sql_where_neg = "%s <= -%s" % (evo_s_field, evol_s)

            sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - evolution positive' WHERE %s;\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name, sql_where_pos)
            sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - evolution negative' WHERE %s;\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name, sql_where_neg)

            # Append the parameters of the quantified evolution (dates, class, rate/surface) to the output text file
            text = "%s --> évolution entre t%s et t%s, pour la classe '%s' (label %s) :\n" % (def_evo_field, idx_bef, idx_aft, class_name, label)
            text += "    %s --> taux d'évolution brut" % evo_field + " (%)\n"
            text += "    %s --> surface d'évolution brute" % evo_s_field + " (m²)\n"
            text += "Evolution quantifiée : %s\n" % text_evol
            appendTextFileCR(output_evolution_text_file, text)
            temp_field += 1

    # Run the SQL processing of the OCS class evolution
    executeQuery(connection, sql_query)
    closeConnection(connection)
    exportVectorByOgr2ogr(postgis_database_name, output_plot_vector, plot_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, format_type=format_vector)

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 2/2 - Fin de la caractérisation des changements." + endC + '\n')

    # Remove temporary files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "soilOccupationChange() : " + endC + "Suppression des fichiers temporaires." + endC + '\n')
        deleteDir(temp_directory)
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    print(cyan + "soilOccupationChange() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n')

    # Update the log
    ending_event = "soilOccupationChange() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return
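
# Illustration (added, not from the original source): how one entry of
# evolutions_list is decoded by the loop above. '0:1:11000:10:50:and' reads as:
# compare date t0 to date t1 for class label 11000 ('Bati'), and report an
# evolution when the class rate changed by at least 10 % AND the class surface
# changed by at least 50 m².
idx_bef, idx_aft, label, evol, evol_s, combi = '0:1:11000:10:50:and'.split(':')
assert (int(idx_bef), int(idx_aft), int(label)) == (0, 1, 11000)
assert (int(evol), int(evol_s), combi) == (10, 50, 'and')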
def classifyVector(vector_input, classif_vector_output, list_feat, expression, input_cfield, output_cfield, path_time_log, format_vector='ESRI Shapefile', extension_vector=".shp", extension_xml = ".xml", extension_model = ".model", save_results_intermediate=False, overwrite=True):

    # Constants
    SUFFIX_OUT = "_out"
    SUFFIX_TRAIN = "_train"

    # Update the log
    starting_event = "classifyVector() : Classification vecteur starting : "
    timeLine(path_time_log,starting_event)

    print(endC)
    print(bold + green + "## START : classifyVector" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green + "classifyVector() : Variables dans la fonction" + endC)
        print(cyan + "classifyVector() : " + endC + "vector_input : " + str(vector_input) + endC)
        print(cyan + "classifyVector() : " + endC + "classif_vector_output : " + str(classif_vector_output) + endC)
        print(cyan + "classifyVector() : " + endC + "list_feat : " + str(list_feat) + endC)
        print(cyan + "classifyVector() : " + endC + "input_cfield : " + str(input_cfield) + endC)
        print(cyan + "classifyVector() : " + endC + "output_cfield : " + str(output_cfield) + endC)
        print(cyan + "classifyVector() : " + endC + "expression : " + str(expression) + endC)
        print(cyan + "classifyVector() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "classifyVector() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "classifyVector() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "classifyVector() : " + endC + "extension_xml : " + str(extension_xml) + endC)
        print(cyan + "classifyVector() : " + endC + "extension_model : " + str(extension_model) + endC)
        print(cyan + "classifyVector() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "classifyVector() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Build the output paths:
    repertory_output = os.path.dirname(vector_input)
    name = os.path.splitext(os.path.basename(vector_input))[0]
    layer_name = name
    vector_output_train_tmp = repertory_output + os.sep + name + SUFFIX_TRAIN + extension_vector
    outstats = repertory_output + os.sep + name + extension_xml
    model = repertory_output + os.sep + name + extension_model

    # Check whether the output vector is defined
    if classif_vector_output == "" :
        classif_vector_output = repertory_output + os.sep + name + SUFFIX_OUT + extension_vector

    # Check whether a classified vector layer already exists
    check = os.path.isfile(classif_vector_output)

    # If it does and overwrite is disabled, skip to the next step
    if check and not overwrite :
        print(cyan + "classifyVector() : " + bold + green +  "vector already classified" + "." + endC)
    # Otherwise, or if overwrite is enabled, run the classification
    else:
        # Try to remove the file
        try:
            removeVectorFile(classif_vector_output, format_vector=format_vector)
        except Exception:
            # Ignore the exception raised when the file does not exist (and thus cannot be removed)
            pass

        if debug >= 3:
            print(cyan + "classifyVector() : " + bold + green +  "Applying classified segmenation", "..." , '\n' + endC)

        # Get the OGR driver for the input shapefile format
        driver_input = ogr.GetDriverByName(format_vector)

        # Open the shapefile in update mode
        data_source_input = driver_input.Open(vector_input, 1) # 1 means writeable.

        if data_source_input is None:
            print(cyan + "classifyVector() : " + bold + red + "Impossible d'ouvrir le fichier vecteur : " + vector_input + endC, file=sys.stderr)
            sys.exit(1) # exit with an error code

        # Get the layer (a layer contains the segments)
        layer_input = data_source_input.GetLayer(0)

        # Check whether the output classification field already exists (GetFieldIndex returns -1 if it does not)
        layer_definition = layer_input.GetLayerDefn() # GetLayerDefn => returns the field names of the user defined (created) fields
        id_field = layer_definition.GetFieldIndex(output_cfield)
        if id_field != -1 :
            print(cyan + "classifyVector() : " + bold + yellow + "Attention le champ de classification existe déjà : " + output_cfield + endC)
            layer_input.DeleteField(id_field)

        # Close the vector_input file
        layer_input.SyncToDisk()
        data_source_input.Destroy()

        # Select the training vector file
        overwrite_str = "-overwrite" if overwrite else ""
        command = "ogr2ogr -f '%s' %s %s %s -where \"%s\" " % (format_vector, overwrite_str, vector_output_train_tmp, vector_input, expression)
        if debug >= 2:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            raise NameError (cyan + "classifyVector() : " + bold + red + "ogr2ogr : error while selecting the training vector file." + endC)

        # Fields used for the classification
        # Example: list_feat = ['meanB0', 'meanB1', 'meanB2', 'meanB3', 'varB0', 'varB1', 'varB2', 'varB3']
        feat = "-feat " + ' '.join(list_feat)

        # Classification:
        # 1) Compute the statistics
        command = "otbcli_ComputeOGRLayersFeaturesStatistics -inshp %s -outstats %s " %(vector_input, outstats)
        command += "%s" %(feat)
        if debug >= 2:
            print(command)
        exitCode = os.system(command)
        if exitCode != 0:
            print(command)
            raise NameError (cyan + "classifyVector() : " + bold + red + "An error occurred during otbcli_ComputeOGRLayersFeaturesStatistics command. See error message above." + endC)

        # 2) Train the model
        command = "otbcli_TrainVectorClassifier -io.vd %s -io.stats %s -io.out %s -cfield %s " %(vector_output_train_tmp, outstats, model, input_cfield)
        command += "%s" %(feat)
        if debug >= 2:
            print(command)
        exitCode = os.system(command)
        if exitCode != 0:
            print(command)
            raise NameError (cyan + "classifyVector() : " + bold + red + "An error occurred during otbcli_TrainVectorClassifier command. See error message above." + endC)

        # 3) Produce the classification
        command = "otbcli_VectorClassifier -in %s -instat %s -model %s -cfield %s -out %s " %(vector_input, outstats, model, output_cfield, classif_vector_output)
        command += "%s" %(feat)
        if debug >= 2:
            print(command)
        exitCode = os.system(command)
        if exitCode != 0:
            print(command)
            raise NameError (cyan + "classifyVector() : " + bold + red + "An error occurred during otbcli_VectorClassifier command. See error message above." + endC)

        # Remove intermediate data
        if not save_results_intermediate:
            # Remove the temporary training file
            if os.path.isfile(vector_output_train_tmp) :
                removeFile(vector_output_train_tmp)

    print(endC)
    print(bold + green + "## END :  classifyVector" + endC)
    print(endC)

    # Update the log
    ending_event = "classifyVector() : Classification vector ending : "
    timeLine(path_time_log,ending_event)
    return
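
# Usage sketch (illustrative, not from the original source): train a classifier on
# the features whose "training" field is non-zero, then classify the whole layer
# into a "predicted" field. The field names, feature list and paths below are
# hypothetical placeholders.
classifyVector("/tmp/segments.shp", "/tmp/segments_classif.shp",
               ['meanB0', 'meanB1', 'meanB2', 'meanB3'],
               "training != 0", "training", "predicted", "/tmp/time.log")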
def occupationIndicator(input_grid,
                        output_grid,
                        class_label_dico_out,
                        input_vector_classif,
                        field_classif_name,
                        input_soil_occupation,
                        input_height_model,
                        class_build_list,
                        class_road_list,
                        class_baresoil_list,
                        class_water_list,
                        class_vegetation_list,
                        class_high_vegetation_list,
                        class_low_vegetation_list,
                        epsg=2154,
                        no_data_value=0,
                        format_raster='GTiff',
                        format_vector='ESRI Shapefile',
                        extension_raster='.tif',
                        extension_vector='.shp',
                        path_time_log='',
                        save_results_intermediate=False,
                        overwrite=True):

    if debug >= 3:
        print(
            '\n' + bold + green +
            "Calcul d'indicateurs du taux de classes OCS - Variables dans la fonction :"
            + endC)
        print(cyan + "    occupationIndicator() : " + endC + "input_grid : " +
              str(input_grid) + endC)
        print(cyan + "    occupationIndicator() : " + endC + "output_grid : " +
              str(output_grid) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_label_dico_out : " + str(class_label_dico_out) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "input_vector_classif : " + str(input_vector_classif) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "field_classif_name : " + str(field_classif_name) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "input_soil_occupation : " + str(input_soil_occupation) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "input_height_model : " + str(input_height_model) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_build_list : " + str(class_build_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_road_list : " + str(class_road_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_baresoil_list : " + str(class_baresoil_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_water_list : " + str(class_water_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_vegetation_list : " + str(class_vegetation_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_high_vegetation_list : " +
              str(class_high_vegetation_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_low_vegetation_list : " + str(class_low_vegetation_list) +
              endC)
        print(cyan + "    occupationIndicator() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "    occupationIndicator() : " + endC + "overwrite : " +
              str(overwrite) + endC + '\n')

    # Constants
    CODAGE_8BITS = 'uint8'
    CODAGE_FLOAT = 'float'
    NODATA_FIELD = 'nodata'

    PREFIX_S = 'S_'
    SUFFIX_TEMP = '_temp'
    SUFFIX_RASTER = '_raster'
    SUFFIX_HEIGHT = '_height'
    SUFFIX_VEGETATION = '_vegetation'

    VEG_MEAN_FIELD = 'veg_h_mean'
    VEG_MAX_FIELD = 'veg_h_max'
    VEG_RATE_FIELD = 'veg_h_rate'
    MAJ_OCS_FIELD = 'class_OCS'

    BUILT_FIELD, BUILT_LABEL = 'built', 1
    MINERAL_FIELD, MINERAL_LABEL = 'mineral', 2
    BARESOIL_FIELD, BARESOIL_LABEL = 'baresoil', 3
    WATER_FIELD, WATER_LABEL = 'water', 4
    VEGETATION_FIELD, VEGETATION_LABEL = 'veget', 5
    HIGH_VEGETATION_FIELD, HIGH_VEGETATION_LABEL = 'high_veg', 6
    LOW_VEGETATION_FIELD, LOW_VEGETATION_LABEL = 'low_veg', 7

    # Update the log
    starting_event = "occupationIndicator() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "occupationIndicator() : " + bold + green +
          "DEBUT DES TRAITEMENTS" + endC + '\n')

    # 'basename' variables
    output_grid_basename = os.path.basename(os.path.splitext(output_grid)[0])
    output_grid_dirname = os.path.dirname(output_grid)
    soil_occupation_basename = os.path.basename(
        os.path.splitext(input_soil_occupation)[0])

    # Temporary variables
    temp_directory = output_grid_dirname + os.sep + output_grid_basename
    temp_grid = temp_directory + os.sep + output_grid_basename + SUFFIX_TEMP + extension_vector
    temp_soil_occupation = temp_directory + os.sep + soil_occupation_basename + SUFFIX_TEMP + SUFFIX_RASTER + extension_raster
    temp_height_vegetation = temp_directory + os.sep + output_grid_basename + SUFFIX_HEIGHT + SUFFIX_VEGETATION + extension_raster

    # Clean up previous processing
    if overwrite:
        if debug >= 3:
            print(cyan + "occupationIndicator() : " + endC +
                  "Nettoyage des traitements précédents." + endC + '\n')
        removeFile(output_grid)
        cleanTempData(temp_directory)
    else:
        if os.path.exists(output_grid):
            raise NameError(
                cyan + "occupationIndicator() : " + bold + yellow +
                "Le fichier de sortie existe déjà et ne sera pas regénéré." +
                endC + '\n')

    #############
    # Step 0/3 # Processing preparation
    #############

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 0/3 - Début de la préparation des traitements." + endC + '\n')

    # Rasterise the classification (OCS) information if it is provided as a vector
    if input_vector_classif != "":
        if debug >= 3:
            print(cyan + "occupationIndicator() : " + endC + bold +
                  "Rasterisation de l'OCS vecteur." + endC + '\n')
        reference_image = input_soil_occupation
        soil_occupation_vector_basename = os.path.basename(
            os.path.splitext(input_vector_classif)[0])
        input_soil_occupation = temp_directory + os.sep + soil_occupation_vector_basename + SUFFIX_RASTER + extension_raster
        command = "otbcli_Rasterization -in %s -out %s %s -im %s -background 0 -mode attribute -mode.attribute.field %s" % (
            input_vector_classif, input_soil_occupation, CODAGE_8BITS,
            reference_image, field_classif_name)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Erreur lors de la rasterisation de l'OCS vecteur." + endC)

    # Analyse the OCS raster layer
    class_other_list = identifyPixelValues(input_soil_occupation)
    no_data_ocs = getNodataValueImage(input_soil_occupation, 1)
    if no_data_ocs is not None:
        no_data_value = no_data_ocs

    # Assign new classification codes
    divide_vegetation_classes = False
    if class_high_vegetation_list != [] and class_low_vegetation_list != []:
        divide_vegetation_classes = True

    col_to_delete_list = [
        "minority", PREFIX_S + NODATA_FIELD, PREFIX_S + BUILT_FIELD,
        PREFIX_S + MINERAL_FIELD, PREFIX_S + BARESOIL_FIELD,
        PREFIX_S + WATER_FIELD
    ]
    class_label_dico = {
        int(no_data_value): NODATA_FIELD,
        int(BUILT_LABEL): BUILT_FIELD,
        int(MINERAL_LABEL): MINERAL_FIELD,
        int(BARESOIL_LABEL): BARESOIL_FIELD,
        int(WATER_LABEL): WATER_FIELD
    }
    if not divide_vegetation_classes:
        class_label_dico[int(VEGETATION_LABEL)] = VEGETATION_FIELD
        col_to_delete_list.append(PREFIX_S + VEGETATION_FIELD)
    else:
        class_label_dico[int(HIGH_VEGETATION_LABEL)] = HIGH_VEGETATION_FIELD
        class_label_dico[int(LOW_VEGETATION_LABEL)] = LOW_VEGETATION_FIELD
        col_to_delete_list.append(PREFIX_S + HIGH_VEGETATION_FIELD)
        col_to_delete_list.append(PREFIX_S + LOW_VEGETATION_FIELD)

    # Handle the class reallocation
    if debug >= 3:
        print(cyan + "occupationIndicator() : " + endC + bold +
              "Reaffectation du raster OCS." + endC + '\n')

    reaff_class_list = []
    macro_reaff_class_list = []

    for label in class_build_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(BUILT_LABEL)

    for label in class_road_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(MINERAL_LABEL)

    for label in class_baresoil_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(BARESOIL_LABEL)

    for label in class_water_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(WATER_LABEL)

    if not divide_vegetation_classes:
        for label in class_vegetation_list:
            if label in class_other_list:
                class_other_list.remove(label)
            reaff_class_list.append(label)
            macro_reaff_class_list.append(VEGETATION_LABEL)
    else:
        for label in class_high_vegetation_list:
            if label in class_other_list:
                class_other_list.remove(label)
            reaff_class_list.append(label)
            macro_reaff_class_list.append(HIGH_VEGETATION_LABEL)
        for label in class_low_vegetation_list:
            if label in class_other_list:
                class_other_list.remove(label)
            reaff_class_list.append(label)
            macro_reaff_class_list.append(LOW_VEGETATION_LABEL)

    # Remaining pixel values, not used: reassign them to no-data
    for label in class_other_list:
        reaff_class_list.append(label)
        macro_reaff_class_list.append(no_data_value)

    reallocateClassRaster(input_soil_occupation, temp_soil_occupation,
                          reaff_class_list, macro_reaff_class_list,
                          CODAGE_8BITS)

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 0/3 - Fin de la préparation des traitements." + endC + '\n')

    #############
    # Step 1/3 # Computing the OCS class rate indicators
    #############

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 1/3 - Début du calcul des indicateurs de taux de classes OCS." +
        endC + '\n')

    if debug >= 3:
        print(cyan + "occupationIndicator() : " + endC + bold +
              "Calcul des indicateurs de taux de classes OCS." + endC + '\n')

    statisticsVectorRaster(temp_soil_occupation, input_grid, temp_grid, 1,
                           True, True, False, col_to_delete_list, [],
                           class_label_dico, path_time_log, True,
                           format_vector, save_results_intermediate, overwrite)

    # Merge the vegetation classes when high and low vegetation are separated (the overall vegetation rate is used in the decision tree)
    if divide_vegetation_classes:
        temp_grid_v2 = os.path.splitext(
            temp_grid)[0] + "_v2" + extension_vector
        sql_statement = "SELECT *, (%s + %s) AS %s FROM %s" % (
            HIGH_VEGETATION_FIELD, LOW_VEGETATION_FIELD, VEGETATION_FIELD,
            os.path.splitext(os.path.basename(temp_grid))[0])
        os.system("ogr2ogr -sql '%s' -dialect SQLITE %s %s" %
                  (sql_statement, temp_grid_v2, temp_grid))
        removeVectorFile(temp_grid, format_vector=format_vector)
        copyVectorFile(temp_grid_v2, temp_grid, format_vector=format_vector)

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 1/3 - Fin du calcul des indicateurs de taux de classes OCS." +
          endC + '\n')

    #############
    # Step 2/3 # Computing the "vegetation height" indicator
    #############

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 2/3 - Début du calcul de l'indicateur de \"hauteur de végétation\"."
        + endC + '\n')

    computeVegetationHeight(
        temp_grid, output_grid, temp_soil_occupation, input_height_model,
        temp_height_vegetation, divide_vegetation_classes, VEGETATION_LABEL,
        HIGH_VEGETATION_LABEL, LOW_VEGETATION_LABEL, HIGH_VEGETATION_FIELD,
        LOW_VEGETATION_FIELD, VEG_MEAN_FIELD, VEG_MAX_FIELD, VEG_RATE_FIELD,
        CODAGE_FLOAT, SUFFIX_TEMP, no_data_value, format_vector, path_time_log,
        save_results_intermediate, overwrite)

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 2/3 - Fin du calcul de l'indicateur de \"hauteur de végétation\"."
        + endC + '\n')

    #############
    # Step 3/3 # Computing the majority class indicator
    #############

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 3/3 - Début du calcul de l'indicateur de classe majoritaire." +
        endC + '\n')

    if input_height_model != "":
        computeMajorityClass(output_grid, temp_directory, NODATA_FIELD,
                             BUILT_FIELD, MINERAL_FIELD, BARESOIL_FIELD,
                             WATER_FIELD, VEGETATION_FIELD,
                             HIGH_VEGETATION_FIELD, LOW_VEGETATION_FIELD,
                             MAJ_OCS_FIELD, VEG_MEAN_FIELD,
                             class_label_dico_out, format_vector,
                             extension_vector, overwrite)
    else:
        print(
            cyan + "occupationIndicator() : " + bold + yellow +
            "Pas de calcul de l'indicateur de classe majoritaire demandé (pas de MNH en entrée)."
            + endC + '\n')

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 3/3 - Fin du calcul de l'indicateur de classe majoritaire." +
          endC + '\n')

    ####################################################################

    # Remove temporary files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "occupationIndicator() : " + endC +
                  "Suppression des fichiers temporaires." + endC + '\n')
        deleteDir(temp_directory)

    print(cyan + "occupationIndicator() : " + bold + green +
          "FIN DES TRAITEMENTS" + endC + '\n')

    # Update the log
    ending_event = "occupationIndicator() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return 0
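
# Usage sketch (illustrative, not from the original source): compute the OCS rate,
# vegetation height and majority class indicators on a grid. All paths, the class
# labels and the output label dictionary below are hypothetical placeholders; an
# empty input_vector_classif means the OCS is already provided as a raster.
occupationIndicator("/tmp/grid.shp", "/tmp/grid_ocs.shp",
                    {0: 'Autres', 1: 'Bati', 2: 'Mineral', 3: 'SolNu', 4: 'Eau', 5: 'Vegetation'},
                    "", "class",
                    "/tmp/ocs.tif", "/tmp/mnh.tif",
                    class_build_list=[11000], class_road_list=[12000],
                    class_baresoil_list=[21000], class_water_list=[22000],
                    class_vegetation_list=[23000],
                    class_high_vegetation_list=[], class_low_vegetation_list=[])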
def classificationLczSql(input_division, input_hre, input_ocs, output_lcz, id_field='id', epsg=2154, format_vector='ESRI Shapefile', postgis_ip_host='localhost', postgis_num_port=5432, postgis_user_name='postgres', postgis_password='******', postgis_database_name='lcz_db', postgis_schema_name='public', postgis_encoding='UTF-8', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "Classification LCZ via SQL - Variables dans la fonction :" + endC)
        print(cyan + "    classificationLczSql() : " + endC + "input_division : " + str(input_division) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "input_hre : " + str(input_hre) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "input_ocs : " + str(input_ocs) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "output_lcz : " + str(output_lcz) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "id_field : " + str(id_field) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "postgis_password : "******"    classificationLczSql() : " + endC + "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "    classificationLczSql() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Constants
    SUFFIX_TEMP = '_temp'

    # Update the log
    starting_event = "classificationLczSql() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "classificationLczSql() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Variables
    hre_field = 'mean_h'
    bur_field = 'built'
    ror_field = 'mineral'
    bsr_field = 'baresoil'
    war_field = 'water'
    ver_field = 'veget'
    vhr_field = 'veg_h_rate'
    lcz_field = 'lcz'
    div_table = 'i_div'
    hre_table = 'i_hre'
    ocs_table = 'i_ocs'
    lcz_table = 'o_lcz'

    # Clean up previous processing
    if overwrite:
        if debug >= 3:
            print(cyan + "classificationLczSql() : " + endC + "Nettoyage des traitements précédents." + endC + '\n')
        removeVectorFile(output_lcz)
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)
    else:
        if os.path.exists(output_lcz):
            raise NameError(cyan + "classificationLczSql() : " + bold + yellow + "Le fichier de sortie existe déjà et ne sera pas regénéré." + endC + '\n')

    ####################################################################

    createDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)
    div_table = importVectorByOgr2ogr(postgis_database_name, input_division, div_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding)
    hre_table = importVectorByOgr2ogr(postgis_database_name, input_hre, hre_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding)
    ocs_table = importVectorByOgr2ogr(postgis_database_name, input_ocs, ocs_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding)
    connection = openConnection(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    query = "DROP TABLE IF EXISTS %s;\n" % lcz_table
    query += "CREATE TABLE %s AS\n" % lcz_table
    query += "    SELECT d.%s AS %s, h.%s AS hre, o.%s AS bur, o.%s AS ror, o.%s AS bsr, o.%s AS war, o.%s AS ver, o.%s AS vhr, d.geom AS geom\n" % (id_field, id_field, hre_field, bur_field, ror_field, bsr_field, war_field, ver_field, vhr_field)
    query += "    FROM %s AS d, %s AS h, %s AS o\n" % (div_table, hre_table, ocs_table)
    query += "    WHERE d.%s = h.%s AND d.%s = o.%s;\n" % (id_field, id_field, id_field, id_field)

    query += "ALTER TABLE %s ADD COLUMN %s VARCHAR(8);\n" % (lcz_table, lcz_field)
    query += "UPDATE %s SET %s = 'urban' WHERE bur > 5;\n" % (lcz_table, lcz_field)
    query += "UPDATE %s SET %s = 'natural' WHERE bur <= 5;\n" % (lcz_table, lcz_field)
    query += "UPDATE %s SET %s = 'low_ocs' WHERE %s = 'natural' AND (bur + ror + bsr + war + ver) <= 60;\n" % (lcz_table, lcz_field, lcz_field)

    query += "UPDATE %s SET %s = '1' WHERE %s = 'urban' AND (hre > 25)               AND (bur > 40);\n"                                     % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '4' WHERE %s = 'urban' AND (hre > 25)               AND (bur <= 40);\n"                                    % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '2' WHERE %s = 'urban' AND (hre > 10 AND hre <= 25) AND (bur > 40);\n"                                     % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '5' WHERE %s = 'urban' AND (hre > 10 AND hre <= 25) AND (bur <= 40);\n"                                    % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '7' WHERE %s = 'urban' AND (hre <= 10)              AND (bur > 70);\n"                                     % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '3' WHERE %s = 'urban' AND (hre <= 10)              AND (bur > 40 AND bur <= 70);\n"                       % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '6' WHERE %s = 'urban' AND (hre <= 10)              AND (bur > 20 AND bur <= 40)       AND (ver > 10);\n"  % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '8' WHERE %s = 'urban' AND (hre <= 10)              AND (bur > 20 AND bur <= 40)       AND (ver <= 10);\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = '9' WHERE %s = 'urban' AND (hre <= 10)              AND (bur <= 20);\n"                                    % (lcz_table, lcz_field, lcz_field)

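    # Rappel des classes LCZ naturelles : A = dense trees, B = scattered trees,
    # C = bush/scrub, D = low plants, E = bare rock/paved, F = bare soil/sand, G = water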
    query += "UPDATE %s SET %s = 'A' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr > 50);\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = 'B' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr > 25 AND vhr <= 50);\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = 'C' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr > 10 AND vhr <= 25);\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = 'D' WHERE %s = 'natural' AND ((ver >= ror) AND (ver >= bsr) AND (ver >= war)) AND (vhr <= 10);\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = 'E' WHERE %s = 'natural' AND ((ror >= bsr) AND (ror >= war) AND (ror >= ver));\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = 'F' WHERE %s = 'natural' AND ((bsr >= ror) AND (bsr >= war) AND (bsr >= ver));\n" % (lcz_table, lcz_field, lcz_field)
    query += "UPDATE %s SET %s = 'G' WHERE %s = 'natural' AND ((war >= ror) AND (war >= bsr) AND (war >= ver));\n" % (lcz_table, lcz_field, lcz_field)

    query += "ALTER TABLE %s ALTER COLUMN %s TYPE INTEGER;\n" % (lcz_table, id_field)
    query += "ALTER TABLE %s ALTER COLUMN hre TYPE NUMERIC(6,2);\n" % lcz_table
    query += "ALTER TABLE %s ALTER COLUMN bur TYPE NUMERIC(6,2);\n" % lcz_table
    query += "ALTER TABLE %s ALTER COLUMN ror TYPE NUMERIC(6,2);\n" % lcz_table
    query += "ALTER TABLE %s ALTER COLUMN bsr TYPE NUMERIC(6,2);\n" % lcz_table
    query += "ALTER TABLE %s ALTER COLUMN war TYPE NUMERIC(6,2);\n" % lcz_table
    query += "ALTER TABLE %s ALTER COLUMN ver TYPE NUMERIC(6,2);\n" % lcz_table
    query += "ALTER TABLE %s ALTER COLUMN vhr TYPE NUMERIC(6,2);\n" % lcz_table

    if debug >= 3:
        print('\n' + query)
    executeQuery(connection, query)

    closeConnection(connection)
    exportVectorByOgr2ogr(postgis_database_name, output_lcz, lcz_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, format_type=format_vector)

    ####################################################################

    # Suppression des fichiers temporaires
    if not save_results_intermediate:
        if debug >= 3:
            print('\n' + cyan + "classificationLczSql() : " + endC + "Suppression des fichiers temporaires." + endC + '\n')
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    print(cyan + "classificationLczSql() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n')

    # Mise à jour du log
    ending_event = "classificationLczSql() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return 0
def vectorsPreparation(emprise_file, classif_input, grid_input, built_input_list, roads_input_list, grid_output, grid_output_cleaned, built_output, roads_output, col_code_ua, col_item_ua, epsg, path_time_log, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    print(bold + yellow + "Début de la préparation des fichiers vecteurs." + endC + "\n")
    timeLine(path_time_log, "Début de la préparation des fichiers vecteurs : ")

    if debug >= 3 :
        print(bold + green + "vectorsPreparation() : Variables dans la fonction" + endC)
        print(cyan + "vectorsPreparation() : " + endC + "emprise_file : " + str(emprise_file) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "classif_input : " + str(classif_input) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "grid_input : " + str(grid_input) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "built_input_list : " + str(built_input_list) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "roads_input_list : " + str(roads_input_list) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "grid_output : " + str(grid_output) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "grid_output_cleaned : " + str(grid_output_cleaned) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "built_output : " + str(built_output) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "roads_output : " + str(roads_output) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "col_code_ua : " + str(col_code_ua) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "col_item_ua : " + str(col_item_ua) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "epsg : " + str(epsg))
        print(cyan + "vectorsPreparation() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "overwrite : " + str(overwrite) + endC)

    FOLDER_TEMP = 'TEMP'
    SUFFIX_VECTOR_REPROJECT = '_reproject'
    SUFFIX_VECTOR_INTERSECT = '_intersect'
    SUFFIX_VECTOR_MERGE = '_merge'
    SUFFIX_VECTOR_SELECT = '_select'

    if not os.path.exists(grid_output) or not os.path.exists(built_output) or not os.path.exists(roads_output) or overwrite:

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        path_grid_temp = os.path.dirname(grid_output) + os.sep + FOLDER_TEMP
        path_built_temp = os.path.dirname(built_output) + os.sep + FOLDER_TEMP
        path_roads_temp = os.path.dirname(roads_output) + os.sep + FOLDER_TEMP

        if os.path.exists(path_grid_temp):
            shutil.rmtree(path_grid_temp)
        if os.path.exists(path_built_temp):
            shutil.rmtree(path_built_temp)
        if os.path.exists(path_roads_temp):
            shutil.rmtree(path_roads_temp)

        if not os.path.exists(path_grid_temp):
            os.mkdir(path_grid_temp)
        if not os.path.exists(path_built_temp):
            os.mkdir(path_built_temp)
        if not os.path.exists(path_roads_temp):
            os.mkdir(path_roads_temp)

        basename_grid = os.path.splitext(os.path.basename(grid_output))[0]
        basename_built = os.path.splitext(os.path.basename(built_output))[0]
        basename_roads = os.path.splitext(os.path.basename(roads_output))[0]

        # Variables pour ajout colonne ID
        field_name = 'ID' # Attention ! Nom fixé en dur dans les scripts indicateurs, pas dans le script final
        field_type = ogr.OFTInteger

        ##############################################
        ### Traitements sur le vecteur Urban Atlas ###
        ##############################################

        if not os.path.exists(grid_output) or overwrite :

            if os.path.exists(grid_output):
                removeVectorFile(grid_output)
            if os.path.exists(grid_output_cleaned):
                removeVectorFile(grid_output_cleaned)

            # MAJ projection
            grid_reproject = path_grid_temp + os.sep + basename_grid + SUFFIX_VECTOR_REPROJECT + extension_vector
            updateProjection(grid_input, grid_reproject, projection=epsg)

            # Découpage du fichier Urban Atlas d'entrée à l'emprise de la zone d'étude
            grid_output_temp = os.path.splitext(grid_output)[0] + "_temp" + extension_vector
            cutVector(emprise_file, grid_reproject, grid_output_temp, overwrite, format_vector)

            # Suppression des très petits polygones qui introduisent des valeurs NaN
            pixel_size = getPixelSizeImage(classif_input)
            min_size_area = pixel_size * 2
            cleanMiniAreaPolygons(grid_output_temp, grid_output, min_size_area, '', format_vector)
            if not save_results_intermediate:
                if os.path.exists(grid_output_temp):
                    removeVectorFile(grid_output_temp, format_vector)

            # Ajout d'un champ ID
            addNewFieldVector(grid_output, field_name, field_type, 0, None, None, format_vector)
            updateIndexVector(grid_output, field_name, format_vector)

            # Suppression des polygones eau et routes (uniquement pour le calcul des indicateurs)
            column = "'%s, %s, %s'" % (field_name, col_code_ua, col_item_ua)
            expression = "%s NOT IN ('12210', '12220', '12230', '50000')" % (col_code_ua)
            ret = filterSelectDataVector(grid_output, grid_output_cleaned, column, expression, format_vector)
            if not ret :
                raise NameError(cyan + "vectorsPreparation() : " + bold + red + "Attention problème lors du filtrage des BD vecteurs : l'expression SQL %s est incorrecte" % (expression) + endC)

        #########################################
        ### Traitements sur les vecteurs bâti ###
        #########################################
        if not os.path.exists(built_output) or overwrite :

            if os.path.exists(built_output):
                removeVectorFile(built_output)

            # MAJ projection
            built_reproject_list=[]
            for built_input in built_input_list:
                built_reproject = path_built_temp + os.sep + os.path.splitext(os.path.basename(built_input))[0] + SUFFIX_VECTOR_REPROJECT + extension_vector
                updateProjection(built_input, built_reproject, projection=epsg)
                built_reproject_list.append(built_reproject)

            # Sélection des entités bâti dans l'emprise de l'étude
            built_intersect_list = []
            for built_reproject in built_reproject_list:
                built_intersect = path_built_temp + os.sep + os.path.splitext(os.path.basename(built_reproject))[0] + SUFFIX_VECTOR_INTERSECT + extension_vector
                intersectVector(emprise_file, built_reproject, built_intersect, format_vector)
                built_intersect_list.append(built_intersect)

            # Fusion des couches bâti de la BD TOPO
            built_merge = path_built_temp + os.sep + basename_built + SUFFIX_VECTOR_MERGE + extension_vector
            built_select = path_built_temp + os.sep + basename_built + SUFFIX_VECTOR_SELECT + extension_vector
            fusionVectors(built_intersect_list, built_merge)

            # Suppression des polygones où la hauteur du bâtiment est à 0
            column = "HAUTEUR"
            expression = "HAUTEUR > 0"
            ret = filterSelectDataVector(built_merge, built_select, column, expression, format_vector)
            if not ret :
                raise NameError(cyan + "vectorsPreparation() : " + bold + red + "Attention problème lors du filtrage des BD vecteurs : l'expression SQL %s est incorrecte" % (expression) + endC)

            # Découpage des bati d'entrée à l'emprise de la zone d'étude
            cutVector(emprise_file, built_select, built_output, overwrite, format_vector)

            # Ajout d'un champ ID
            addNewFieldVector(built_output, field_name, field_type, 0, None, None, format_vector)
            updateIndexVector(built_output, field_name, format_vector)

        ###########################################
        ### Traitements sur les vecteurs routes ###
        ###########################################

        if not os.path.exists(roads_output) or overwrite :

            if os.path.exists(roads_output):
                removeVectorFile(roads_output)

            # MAJ projection
            roads_reproject_list=[]
            for roads_input in roads_input_list:
                roads_reproject = path_roads_temp + os.sep + os.path.splitext(os.path.basename(roads_input))[0] + SUFFIX_VECTOR_REPROJECT + extension_vector
                updateProjection(roads_input, roads_reproject, projection=epsg)
                roads_reproject_list.append(roads_reproject)

            # Sélection des entités routes dans l'emprise de l'étude
            roads_intersect_list = []
            for roads_reproject in roads_reproject_list:
                roads_intersect = path_roads_temp + os.sep + os.path.splitext(os.path.basename(roads_reproject))[0] + SUFFIX_VECTOR_INTERSECT + extension_vector
                intersectVector(emprise_file, roads_reproject, roads_intersect, format_vector)
                roads_intersect_list.append(roads_intersect)

            # Fusion des couches route de la BD TOPO
            roads_merge = path_roads_temp + os.sep + basename_roads + SUFFIX_VECTOR_MERGE + extension_vector
            roads_select = path_roads_temp + os.sep + basename_roads + SUFFIX_VECTOR_SELECT + extension_vector
            fusionVectors(roads_intersect_list, roads_merge)

            # Sélection des entités suivant la nature de la route dans la couche routes de la BD TOPO
            column = "NATURE"
            expression = "NATURE IN ('Autoroute', 'Bretelle', 'Quasi-autoroute', 'Route  1 chausse', 'Route  2 chausses', 'Route a 1 chaussee', 'Route a 2 chaussees', 'Route à 1 chaussée', 'Route à 2 chaussées')"
            ret = filterSelectDataVector (roads_merge, roads_select, column, expression, format_vector)
            if not ret :
                raise NameError(cyan + "vectorsPreparation() : " + bold + red + "Attention problème lors du filtrage des BD vecteurs : l'expression SQL %s est incorrecte" % (expression) + endC)

            # Découpage des routes d'entrée à l'emprise de la zone d'étude
            cutVectorAll(emprise_file, roads_select, roads_output, overwrite, format_vector)

            # Ajout d'un champ ID
            addNewFieldVector(roads_output, field_name, field_type, 0, None, None, format_vector)
            updateIndexVector(roads_output, field_name, format_vector)

        ##########################################
        ### Nettoyage des fichiers temporaires ###
        ##########################################

        if not save_results_intermediate:
            if os.path.exists(path_grid_temp):
                shutil.rmtree(path_grid_temp)
            if os.path.exists(path_built_temp):
                shutil.rmtree(path_built_temp)
            if os.path.exists(path_roads_temp):
                shutil.rmtree(path_roads_temp)

    else:
        print(bold + magenta + "La préparation des fichiers vecteurs a déjà eu lieu.\n" + endC)

    print(bold + yellow + "Fin de la préparation des fichiers vecteurs.\n" + endC)
    timeLine(path_time_log, "Fin de la préparation des fichiers vecteurs : ")

    return
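
# Exemple d'appel minimal (esquisse hypothétique : tous les chemins et les noms de
# colonnes Urban Atlas ci-dessous sont des hypothèses d'illustration, absentes de la source)
if __name__ == "__main__":
    vectorsPreparation("/tmp/emprise.shp", "/tmp/classif.tif", "/tmp/urban_atlas.shp",
                       ["/tmp/bati_indifferencie.shp", "/tmp/bati_industriel.shp"],
                       ["/tmp/routes.shp"], "/tmp/grid.shp", "/tmp/grid_cleaned.shp",
                       "/tmp/bati.shp", "/tmp/routes_preparees.shp",
                       "CODE2012", "ITEM2012", 2154, "/tmp/time_log.txt")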
Exemplo n.º 9
0
def computeRoughnessByOcsMnh( grid_input, grid_output, mnh_input, classif_input, class_build_list, epsg, no_data_value, path_time_log, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    # Constante
    FIELD_H_TYPE = ogr.OFTReal
    FIELD_ID_TYPE = ogr.OFTInteger
    FIELD_NAME_HSUM = "sum_h"
    FIELD_NAME_HRE = "mean_h"
    FIELD_NAME_AREA = "nb_area"
    FIELD_NAME_ID = "id"

    SUFFIX_HEIGHT = '_hauteur'
    SUFFIX_BUILT = '_bati'
    SUFFIX_TEMP = '_temp'
    SUFFIX_MASK = '_mask'

    # Mise à jour du Log
    timeLine(path_time_log, "Début du calcul de l'indicateur Height of Roughness Elements par OCS et MNT starting : ")
    print(cyan + "computeRoughnessByOcsMnh() : " + endC + "Début du calcul de l'indicateur Height of Roughness Elements par OCS et MNT." + endC + "\n")

    if debug >= 3:
        print(bold + green + "computeRoughnessByOcsMnh() : Variables dans la fonction" + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "grid_input : " + str(grid_input) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "grid_output : " + str(grid_output) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "mnh_input : " + str(mnh_input) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "classif_input : " + str(classif_input) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "class_build_list : " + str(class_build_list) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Test si le vecteur de sortie existe déjà et s'il doit être écrasé
    check = os.path.isfile(grid_output)

    if check and not overwrite: # Si le fichier de sortie existe déjà et que overwrite n'est pas activé
        print(cyan + "computeRoughnessByOcsMnh() : " + bold + yellow + "Le calcul de Roughness par OCS et MNH a déjà eu lieu." + endC + "\n")
        print(cyan + "computeRoughnessByOcsMnh() : " + bold + yellow + "Grid vector output : " + grid_output + " already exists and will not be created again." + endC)
    else :
        if check:
            try:
                removeVectorFile(grid_output)
            except Exception:
                pass # si le fichier n'existe pas, il ne peut pas être supprimé : cette étape est ignorée

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        # Récuperation de la projection de l'image
        epsg_proj = getProjectionImage(classif_input)
        if epsg_proj == 0:
            epsg_proj = epsg

        # Préparation des fichiers temporaires
        temp_path = os.path.dirname(grid_output) + os.sep + "RoughnessByOcsAndMnh"

        # Nettoyage du repertoire temporaire si il existe
        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)
        os.makedirs(temp_path)

        basename = os.path.splitext(os.path.basename(grid_output))[0]
        built_height = temp_path + os.sep + basename + SUFFIX_HEIGHT + SUFFIX_BUILT + extension_raster
        built_height_temp = temp_path + os.sep + basename + SUFFIX_HEIGHT + SUFFIX_BUILT + SUFFIX_TEMP + extension_raster
        classif_built_mask = temp_path + os.sep + basename + SUFFIX_BUILT + SUFFIX_MASK + extension_raster
        grid_output_temp = temp_path + os.sep + basename + SUFFIX_TEMP + extension_vector

        ##############################
        ### Calcul de l'indicateur ###
        ##############################

        # liste des classes de bati a prendre en compte dans l'expression du BandMath
        expression_bati = ""
        for id_class in class_build_list :
            expression_bati += "im1b1==%s or " %(str(id_class))
        expression_bati = expression_bati[:-4]
        expression = "(%s) and (im2b1!=%s) and (im2b1>0)" %(expression_bati, str(no_data_value))

        # Creation d'un masque vecteur des batis pour la surface des zones baties
        command = "otbcli_BandMath -il %s %s -out %s uint8 -exp '%s ? 1 : 0'" %(classif_input, mnh_input, classif_built_mask, expression)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            raise NameError(cyan + "computeRoughnessByOcsMnh() : " + bold + red + "!!! Une erreur s'est produite au cours de la commande otbcli_BandMath : " + command + ". Voir message d'erreur." + endC)

        # Récupération de la hauteur du bati
        command = "otbcli_BandMath -il %s %s -out %s float -exp '%s ? im2b1 : 0'" %(classif_input, mnh_input, built_height_temp, expression)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            raise NameError(cyan + "computeRoughnessByOcsMnh() : " + bold + red + "!!! Une erreur s'est produite au cours de la commande otbcli_BandMath : " + command + ". Voir message d'erreur." + endC)

        command = "gdal_translate -a_srs EPSG:%s -a_nodata %s -of %s %s %s" %(str(epsg_proj), str(no_data_value), format_raster, built_height_temp, built_height)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            raise NameError(cyan + "computeRoughnessByOcsMnh() : " + bold + red + "!!! Une erreur s'est produite au cours de la commande gdal_translate : " + command + ". Voir message d'erreur." + endC)

        # Récupération du nombre de pixel bati de chaque maille pour definir la surface
        statisticsVectorRaster(classif_built_mask, grid_input, grid_output_temp, 1, False, False, True, ["min", "max", "median", "mean", "std", "unique", "range"], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite)
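        # NB : par hypothèse sur la signature de statisticsVectorRaster, la liste de champs
        # passée correspond aux colonnes statistiques à supprimer du résultat ;
        # seule la colonne 'sum' est conservée ici, puis renommée ci-dessous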

        # Renommer le champ 'sum' en FIELD_NAME_AREA
        renameFieldsVector(grid_output_temp, ['sum'], [FIELD_NAME_AREA], format_vector)

        # Récupération de la somme des hauteurs du bâti de chaque maille
        statisticsVectorRaster(built_height, grid_output_temp, grid_output, 1, False, False, True, ["min", "max", "median", 'mean', "std", "unique", "range"], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite)

        # Renommer le champ 'sum' en FIELD_NAME_HSUM
        renameFieldsVector(grid_output, ['sum'], [FIELD_NAME_HSUM], format_vector)

        # Calcul de la colonne FIELD_NAME_HRE : division de FIELD_NAME_HSUM par FIELD_NAME_AREA
        field_values_list = getAttributeValues(grid_output, None, None, {FIELD_NAME_ID:FIELD_ID_TYPE, FIELD_NAME_HSUM:FIELD_H_TYPE, FIELD_NAME_AREA:FIELD_H_TYPE}, format_vector)

        field_new_values_list = []
        for index in range(0, len(field_values_list[FIELD_NAME_ID])) :
            value_h = 0.0
            if field_values_list[FIELD_NAME_AREA][index] > 0 :
                value_h = field_values_list[FIELD_NAME_HSUM][index] / field_values_list[FIELD_NAME_AREA][index]
            field_new_values_list.append({FIELD_NAME_HRE:value_h})

        # Ajout de la nouvelle colonne calculée FIELD_NAME_HRE
        addNewFieldVector(grid_output, FIELD_NAME_HRE, FIELD_H_TYPE, 0, None, None, format_vector)
        setAttributeValuesList(grid_output, field_new_values_list, format_vector)

        ##########################################
        ### Nettoyage des fichiers temporaires ###
        ##########################################
        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)

    print(cyan + "computeRoughnessByOcsMnh() : " + endC + "Fin du calcul de l'indicateur Height of Roughness Elements par OCS et MNT." + endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Height of Roughness Elements par OCS et MNT  ending : ")

    return
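
# Exemple d'appel minimal (esquisse hypothétique : chemins et code de classe bâti [11100]
# choisis pour l'illustration, absents de la source)
if __name__ == "__main__":
    computeRoughnessByOcsMnh("/tmp/grid.shp", "/tmp/grid_hre.shp", "/tmp/mnh.tif",
                             "/tmp/ocs.tif", [11100], 2154, 0, "/tmp/time_log.txt")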
Exemplo n.º 10
0
def computeRoughness(classif_input,
                     mnh_input,
                     vector_grid_input,
                     vector_grid_output,
                     class_label_dico,
                     epsg,
                     path_time_log,
                     format_raster='GTiff',
                     format_vector='ESRI Shapefile',
                     extension_raster=".tif",
                     extension_vector=".shp",
                     save_results_intermediate=False,
                     overwrite=True):

    # Constante
    FIELD_NAME_HRE = "mean_h"

    # Mise à jour du Log
    timeLine(
        path_time_log,
        "Début du calcul de l'indicateur Height of Roughness Elements par OCS et MNH starting : "
    )
    print(
        cyan + "computeRoughness() : " + endC +
        "Début du calcul de l'indicateur Height of Roughness Elements par OCS et MNH."
        + endC + "\n")

    if debug >= 3:
        print(bold + green +
              "computeRoughness() : Variables dans la fonction" + endC)
        print(cyan + "computeRoughness() : " + endC + "classif_input : " +
              str(classif_input) + endC)
        print(cyan + "computeRoughness() : " + endC + "mnh_input : " +
              str(mnh_input) + endC)
        print(cyan + "computeRoughness() : " + endC + "vector_grid_input : " +
              str(vector_grid_input) + endC)
        print(cyan + "computeRoughness() : " + endC + "vector_grid_output : " +
              str(vector_grid_output) + endC)
        print(cyan + "computeRoughness() : " + endC + "class_label_dico : " +
              str(class_label_dico) + endC)
        print(cyan + "computeRoughness() : " + endC + "epsg : " + str(epsg) +
              endC)
        print(cyan + "computeRoughness() : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "computeRoughness() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "computeRoughness() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "computeRoughness() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Test si le vecteur de sortie existe déjà et s'il doit être écrasé
    check = os.path.isfile(vector_grid_output)

    if check and not overwrite:  # Si le fichier de sortie existe déjà et que overwrite n'est pas activé
        print(cyan + "computeRoughness() : " + bold + yellow +
              "Le calcul de Roughness par OCS et MNH a déjà eu lieu." + endC +
              "\n")
        print(cyan + "computeRoughness() : " + bold + yellow +
              "Grid vector output : " + vector_grid_output +
              " already exists and will not be created again." + endC)
    else:
        if check:
            try:
                removeVectorFile(vector_grid_output)
            except Exception:
                pass  # si le fichier n'existe pas, il ne peut pas être supprimé : cette étape est ignorée

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        # Récuperation de la projection de l'image
        epsg_proj = getProjectionImage(classif_input)
        if epsg_proj == 0:
            epsg_proj = epsg

        # Liste des classes
        #key_class_label_list = list(class_label_dico.keys())

        # Préparation des fichiers temporaires
        temp_path = os.path.dirname(vector_grid_output) + os.sep + "TEMP_HRE"

        # Nettoyage du repertoire temporaire si il existe
        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)
        os.makedirs(temp_path)

        built_height_temp = temp_path + os.sep + "hauteur_bati_temp" + extension_raster
        built_height = temp_path + os.sep + "hauteur_bati" + extension_raster

        ##############################
        ### Calcul de l'indicateur ###
        ##############################

        # Récupération de la hauteur du bati
        #code_bati = [c for c,v in class_label_dico.items() if v=="bati"][0]
        code_bati = list(class_label_dico.keys())[list(
            class_label_dico.values()).index("bati")]
        command = "otbcli_BandMath -il %s %s -out %s float -exp 'im1b1==%s ? im2b1 : 0'" % (
            classif_input, mnh_input, buit_height_temp, str(code_bati))
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(
                cyan + "computeRoughness() : " + bold + red +
                "!!! Une erreur c'est produite au cours de la commande otbcli_BandMath : "
                + command + ". Voir message d'erreur." + endC,
                file=sys.stderr)
            raise

        command = "gdal_translate -a_srs EPSG:%s -a_nodata 0 -of %s %s %s" % (
            str(epsg_proj), format_raster, buit_height_temp, buit_height)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(
                cyan + "computeRoughness() : " + bold + red +
                "!!! Une erreur c'est produite au cours de la comande : gdal_translate : "
                + command + ". Voir message d'erreur." + endC,
                file=sys.stderr)
            raise

        # Récupération de la hauteur moyenne du bati de chaque maille
        statisticsVectorRaster(
            built_height, vector_grid_input, vector_grid_output, 1, False,
            False, True,
            ["min", "max", "median", "sum", "std", "unique", "range"], [], {},
            path_time_log, True, format_vector, save_results_intermediate,
            overwrite)

        # Renommer le champ 'mean' en FIELD_NAME_HRE
        renameFieldsVector(vector_grid_output, ['mean'], [FIELD_NAME_HRE],
                           format_vector)

        ##########################################
        ### Nettoyage des fichiers temporaires ###
        ##########################################
        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)

    print(
        cyan + "computeRoughness() : " + endC +
        "Fin du calcul de l'indicateur Height of Roughness Elements par OCS et MNH."
        + endC + "\n")
    timeLine(
        path_time_log,
        "Fin du calcul de l'indicateur Height of Roughness Elements par OCS et MNH ending : "
    )

    return
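
# Exemple d'appel minimal (esquisse hypothétique : chemins et dictionnaire classe/label
# fictifs ; le dictionnaire doit contenir le label "bati", recherché par la fonction)
if __name__ == "__main__":
    computeRoughness("/tmp/ocs.tif", "/tmp/mnh.tif", "/tmp/grid.shp", "/tmp/grid_hre.shp",
                     {11100: "bati", 12200: "route", 20000: "vegetation"},
                     2154, "/tmp/time_log.txt")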
Exemplo n.º 11
0
def estimateQualityClassification(image_input,
                                  vector_cut_input,
                                  vector_sample_input,
                                  vector_output,
                                  nb_dot,
                                  no_data_value,
                                  column_name_vector,
                                  column_name_ref,
                                  column_name_class,
                                  path_time_log,
                                  epsg=2154,
                                  format_raster='GTiff',
                                  format_vector="ESRI Shapefile",
                                  extension_raster=".tif",
                                  extension_vector=".shp",
                                  save_results_intermediate=False,
                                  overwrite=True):

    # Mise à jour du Log
    starting_event = "estimateQualityClassification() : Masks creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green +
          "## START : CREATE PRINT POINTS FILE FROM CLASSIF IMAGE" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green +
              "estimateQualityClassification() : Variables dans la fonction" +
              endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "image_input : " + str(image_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_output : " + str(vector_output) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "nb_dot : " + str(nb_dot) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_vector : " + str(column_name_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_ref : " + str(column_name_ref) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_class : " + str(column_name_class) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "estimateQualityClassification() : " + endC + "epsg  : " +
              str(epsg) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "overwrite : " + str(overwrite) + endC)

    # ETAPE 0 : PREPARATION DES FICHIERS INTERMEDIAIRES

    CODAGE = "uint16"

    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_TEMP = '_temp'
    SUFFIX_SAMPLE = '_sample'

    repertory_output = os.path.dirname(vector_output)
    base_name = os.path.splitext(os.path.basename(vector_output))[0]

    vector_output_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    raster_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_raster
    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    raster_cut = repertory_output + os.sep + base_name + SUFFIX_CUT + extension_raster
    vector_sample_temp = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + extension_vector

    # Mise à jour des noms de champs (avec des noms par défaut, supposés ici, si aucune colonne n'est fournie)
    input_ref_col = ""
    val_ref = 0
    val_ref_col = "Ref"      # nom de colonne par défaut (hypothèse)
    val_class_col = "Class"  # nom de colonne par défaut (hypothèse)
    if (column_name_vector != "") and (column_name_vector is not None):
        input_ref_col = column_name_vector
    if (column_name_ref != "") and (column_name_ref is not None):
        val_ref_col = column_name_ref
    if (column_name_class != "") and (column_name_class is not None):
        val_class_col = column_name_class

    # ETAPE 1 : DEFINIR UN SHAPE ZONE D'ETUDE

    if (vector_cut_input is not None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input

    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # ETAPE 2 : DECOUPAGE DU RASTEUR PAR LE VECTEUR D'EMPRISE SI BESOIN

    if cutting_action:
        # Identification de la taille des pixels en x et en y
        pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

        # Si le fichier de sortie existe deja le supprimer
        if os.path.exists(raster_cut):
            removeFile(raster_cut)

        # Commande de découpe
        if not cutImageByVector(vector_study, image_input, raster_cut,
                                pixel_size_x, pixel_size_y, no_data_value, 0,
                                format_raster, format_vector):
            raise NameError(
                cyan + "estimateQualityClassification() : " + bold + red +
                "Une erreur s'est produite au cours du découpage de l'image : "
                + image_input + endC)
        if debug >= 2:
            print(cyan + "estimateQualityClassification() : " + bold + green +
                  "DECOUPAGE DU RASTER %s AVEC LE VECTEUR %s" %
                  (image_input, vector_study) + endC)
    else:
        raster_cut = image_input

    # ETAPE 3 : CREATION DE LISTE POINTS AVEC DONNEE ISSU D'UN FICHIER RASTER

    # Géométrie de l'image
    cols, rows, bands = getGeometryImage(raster_cut)
    xmin, xmax, ymin, ymax = getEmpriseImage(raster_cut)
    pixel_width, pixel_height = getPixelWidthXYImage(raster_cut)

    if debug >= 2:
        print("cols : " + str(cols))
        print("rows : " + str(rows))
        print("bands : " + str(bands))
        print("xmin : " + str(xmin))
        print("ymin : " + str(ymin))
        print("xmax : " + str(xmax))
        print("ymax : " + str(ymax))
        print("pixel_width : " + str(pixel_width))
        print("pixel_height : " + str(pixel_height))

    # ETAPE 3-1 : CAS CREATION D'UN FICHIER DE POINTS PAR TIRAGE ALEATOIRE DANS LA MATRICE IMAGE
    if (vector_sample_input is None) or (vector_sample_input == ""):
        is_sample_file = False

        # Les dimensions de l'image
        nb_pixels = abs(cols * rows)

        # Tirage aléatoire des points
        drawn_dot_list = []
        while len(drawn_dot_list) < nb_dot:
            val = random.randint(0, nb_pixels - 1)
            if val not in drawn_dot_list:
                drawn_dot_list.append(val)

        # Création d'un dico indexé par le numéro du tirage, avec attributs pos_x, pos_y et valeur du pixel
        points_random_value_dico = {}

        points_coordonnees_list = []
        for point in drawn_dot_list:
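            # Conversion de l'index linéaire du pixel tiré en position (ligne, colonne) dans la matrice image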
            pos_y = point // cols
            pos_x = point % cols
            coordonnees_list = [pos_x, pos_y]
            points_coordonnees_list.append(coordonnees_list)

        # Lecture dans le fichier raster des valeurs
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_list)
        if debug >= 4:
            print(values_list)
        for idx_point in range(len(drawn_dot_list)):
            val_class = values_list[idx_point]
            coordonnees_list = points_coordonnees_list[idx_point]
            pos_x = coordonnees_list[0]
            pos_y = coordonnees_list[1]
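            # Passage des indices pixel aux coordonnées carte (origine du raster en haut à gauche : xmin, ymax)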
            coor_x = xmin + (pos_x * abs(pixel_width))
            coor_y = ymax - (pos_y * abs(pixel_height))
            point_attr_dico = {
                "Ident": idx_point,
                val_ref_col: int(val_ref),
                val_class_col: int(val_class)
            }
            points_random_value_dico[idx_point] = [[coor_x, coor_y],
                                                   point_attr_dico]

            if debug >= 4:
                print("idx_point : " + str(idx_point))
                print("pos_x : " + str(pos_x))
                print("pos_y : " + str(pos_y))
                print("coor_x : " + str(coor_x))
                print("coor_y : " + str(coor_y))
                print("val_class : " + str(val_class))
                print("")

    # ETAPE 3-2 : CAS D'UN FICHIER DE POINTS DEJA EXISTANT, MISE A JOUR DE LA DONNEE ISSUE DU RASTER
    else:
        # Le fichier de points d'analyses existe
        is_sample_file = True
        cutVectorAll(vector_study, vector_sample_input, vector_sample_temp,
                     format_vector)
        if input_ref_col != "":
            points_coordinates_dico = readVectorFilePoints(
                vector_sample_temp, [input_ref_col], format_vector)
        else:
            points_coordinates_dico = readVectorFilePoints(
                vector_sample_temp, [], format_vector)

        # Création du dico
        points_random_value_dico = {}

        points_coordonnees_list = []
        for index_key in points_coordinates_dico:
            # Recuperer les valeurs des coordonnees
            coord_info_list = points_coordinates_dico[index_key]
            coor_x = coord_info_list[0]
            coor_y = coord_info_list[1]
            pos_x = int(round((coor_x - xmin) / abs(pixel_width)))
            pos_y = int(round((ymax - coor_y) / abs(pixel_height)))
            coordonnees_list = [pos_x, pos_y]
            points_coordonnees_list.append(coordonnees_list)

        # Lecture dans le fichier raster des valeurs
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_list)

        for index_key in points_coordinates_dico:
            # Récuperer les valeurs des coordonnees
            coord_info_list = points_coordinates_dico[index_key]
            coor_x = coord_info_list[0]
            coor_y = coord_info_list[1]
            # Récupérer la classe de référence dans le vecteur d'entrée
            if input_ref_col != "":
                label = coord_info_list[2]
                val_ref = label.get(input_ref_col)
            # Récupérer la classe issue du raster d'entrée
            val_class = values_list[index_key]
            # Création du dico contenant identifiant du point, valeur de référence, valeur du raster d'entrée
            point_attr_dico = {
                "Ident": index_key,
                val_ref_col: int(val_ref),
                val_class_col: int(val_class)
            }
            if debug >= 4:
                print("point_attr_dico: " + str(point_attr_dico))
            points_random_value_dico[index_key] = [[coor_x, coor_y],
                                                   point_attr_dico]

    # ETAPE 4 : CREATION ET DECOUPAGE DU FICHIER VECTEUR RESULTAT PAR LE SHAPE D'ETUDE

    # Creer le fichier de points
    if is_sample_file and os.path.exists(vector_sample_temp):

        attribute_dico = {val_class_col: ogr.OFTInteger}
        # Recopie du fichier
        removeVectorFile(vector_output_temp)
        copyVectorFile(vector_sample_temp, vector_output_temp)

        # Ajout des champs au fichier de sortie
        for field_name in attribute_dico:
            addNewFieldVector(vector_output_temp, field_name,
                              attribute_dico[field_name], 0, None, None,
                              format_vector)

        # Préparation des donnees
        field_new_values_list = []
        for index_key in points_random_value_dico:
            point_attr_dico = points_random_value_dico[index_key][1]
            point_attr_dico.pop(val_ref_col, None)
            field_new_values_list.append(point_attr_dico)

        # Ajout des donnees
        setAttributeValuesList(vector_output_temp, field_new_values_list,
                               format_vector)

    else:
        # Définir les attributs du fichier résultat
        attribute_dico = {
            "Ident": ogr.OFTInteger,
            val_ref_col: ogr.OFTInteger,
            val_class_col: ogr.OFTInteger
        }

        createPointsFromCoordList(attribute_dico, points_random_value_dico,
                                  vector_output_temp, epsg, format_vector)

    # Découpage du fichier de points d'echantillons
    cutVectorAll(vector_study, vector_output_temp, vector_output,
                 format_vector)

    # ETAPE 5 : SUPPRESSION DES FICHIERS INTERMEDIAIRES INUTILES

    # Suppression des données intermédiaires
    if not save_results_intermediate:
        if cutting_action:
            removeFile(raster_cut)
        else:
            removeVectorFile(vector_study)
            removeFile(raster_study)
        if is_sample_file:
            removeVectorFile(vector_sample_temp)
        removeVectorFile(vector_output_temp)

    print(endC)
    print(bold + green +
          "## END : CREATE PRINT POINTS FILE FROM CLASSIF IMAGE" + endC)
    print(endC)

    # Mise à jour du Log
    ending_event = "estimateQualityClassification() : Masks creation ending : "
    timeLine(path_time_log, ending_event)

    return
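
# Exemple d'appel minimal en mode tirage aléatoire (vector_sample_input vide) ;
# esquisse hypothétique : chemins et noms de colonnes fictifs, absents de la source
if __name__ == "__main__":
    estimateQualityClassification("/tmp/classif.tif", "", "", "/tmp/points_controle.shp",
                                  500, 0, "", "ref", "class", "/tmp/time_log.txt")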
def createEmprise(input_dir,
                  output_file,
                  is_not_assembled,
                  is_all_polygons_used,
                  is_not_date,
                  is_optimize_emprise,
                  is_optimize_emprise_nodata,
                  no_data_value,
                  size_erode,
                  path_time_log,
                  separ_name="_",
                  pos_date=1,
                  nb_char_date=8,
                  separ_date="",
                  epsg=2154,
                  format_vector='ESRI Shapefile',
                  extension_raster=".tif",
                  extension_vector=".shp",
                  save_results_intermediate=False,
                  overwrite=True):

    # Affichage des paramètres
    if debug >= 3:
        print(bold + green +
              "Variables dans le createEmprise - Variables générales" + endC)
        print(cyan + "createEmprise() : " + endC + "input_dir : " +
              str(input_dir))
        print(cyan + "createEmprise() : " + endC + "output_file : " +
              str(output_file))
        print(cyan + "createEmprise() : " + endC + "is_not_assembled : " +
              str(is_not_assembled))
        print(cyan + "createEmprise() : " + endC + "is_all_polygons_used : " +
              str(is_all_polygons_used))
        print(cyan + "createEmprise() : " + endC + "is_not_date : " +
              str(is_not_date))
        print(cyan + "createEmprise() : " + endC + "is_optimize_emprise : " +
              str(is_optimize_emprise))
        print(cyan + "createEmprise() : " + endC +
              "is_optimize_emprise_nodata : " +
              str(is_optimize_emprise_nodata))
        print(cyan + "createEmprise() : " + endC + "no_data_value : " +
              str(no_data_value))
        print(cyan + "createEmprise() : " + endC + "size_erode : " +
              str(size_erode))
        print(cyan + "createEmprise() : " + endC + "path_time_log : " +
              str(path_time_log))
        print(cyan + "createEmprise() : " + endC + "separ_name : " +
              str(separ_name))
        print(cyan + "createEmprise() : " + endC + "pos_date : " +
              str(pos_date))
        print(cyan + "createEmprise() : " + endC + "nb_char_date : " +
              str(nb_char_date))
        print(cyan + "createEmprise() : " + endC + "separ_date : " +
              str(separ_date))
        print(cyan + "createEmprise() : " + endC + "epsg : " + str(epsg))
        print(cyan + "createEmprise() : " + endC + "format_vector : " +
              str(format_vector))
        print(cyan + "createEmprise() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "createEmprise() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "createEmprise() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate))
        print(cyan + "createEmprise() : " + endC + "overwrite : " +
              str(overwrite))

    # Constantes
    EXT_LIST_HDF5 = ['h5', 'H5', 'he5', 'HE5', 'hdf5', 'HDF5']
    EXT_LIST = EXT_LIST_HDF5 + [
        'tif', 'TIF', 'tiff', 'TIFF', 'ecw', 'ECW', 'jp2', 'JP2', 'dim', 'DIM',
        'asc', 'ASC'
    ]
    SUFFIX_DETAILLEE = "_detail"
    SUFFIX_MASK_ZERO = "_mask_zeros"
    SUFFIX_TMP = "_tmp"

    CODAGE_8B = "uint8"
    ATTR_NAME_ID = "Id"
    ATTR_NAME_NOMIMAGE = "NomImage"
    ATTR_NAME_DATEACQUI = "DateAcqui"
    ATTR_NAME_HEUREACQUI = "HeureAcqui"
    ATTR_NAME_REFDOSSIER = "RefDossier"

    # Variables
    points_list = []
    name_image_list = []
    name_rep_list = []
    ref_dossier_list = []
    date_list = []
    heure_list = []
    optimize_emprise_nodata_shape_list = []
    polygons_attr_coord_dico = {}
    pos_date = pos_date - 1
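    # Le paramètre pos_date est supposé fourni en base 1 ; on le ramène en index Python (base 0)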

    repertory_output = os.path.dirname(output_file)
    file_name = os.path.splitext(os.path.basename(output_file))[0]
    extension = os.path.splitext(output_file)[1]
    file_vector_detail = repertory_output + os.sep + file_name + SUFFIX_DETAILLEE + extension

    # Si un fichier de sortie avec le même nom existe déjà et que l'option écrasement est à False, alors on ne le recalcule pas
    check = os.path.isfile(output_file)
    if check and not overwrite:
        print(
            bold + yellow + "createEmprise() : " + endC +
            "Le fichier vecteur d'emprise %s existe déjà : pas d'actualisation"
            % (output_file) + endC)
    # Sinon, ou si l'option écrasement est activée, alors on le calcule
    else:
        if check:
            try:  # Suppression de l'éventuel fichier existant
                removeVectorFile(output_file)
                removeVectorFile(file_vector_detail)
            except Exception:
                pass  # Si le fichier ne peut pas être supprimé, on suppose qu'il n'existe pas et on passe à la suite

        # Récuperer tous les sous répertoires
        sub_rep_list = getSubRepRecursifList(input_dir)
        sub_rep_list.append(input_dir)

        # Parcours de chaque dossier image du dossier en entrée
        for repertory in sub_rep_list:
            if os.path.isdir(repertory):

                if debug >= 2:
                    print(cyan + "createEmprises() : " + endC + bold + green +
                          "Traitement de : " + endC + repertory)

                # Récupération des images du dossier en entrée
                imagettes_jp2_tif_ecw_list = []
                imagettes_list = os.listdir(repertory)

                for elt1 in imagettes_list:
                    path_image = repertory + os.sep + elt1
                    if (os.path.isfile(path_image)) and (len(
                            elt1.rsplit('.', 1)) == 2) and (elt1.rsplit(
                                '.', 1)[1] in EXT_LIST):
                        if elt1.rsplit('.', 1)[1] in EXT_LIST_HDF5:
                            elt1_new = os.path.splitext(
                                elt1)[0] + extension_raster
                            path_image_new = repertory + os.sep + elt1_new
                            h5ToGtiff(path_image, path_image_new)
                            imagettes_jp2_tif_ecw_list.append(elt1_new)
                        else:
                            imagettes_jp2_tif_ecw_list.append(elt1)

                # Pour le cas ou le repertoire contient des fichiers images
                if not imagettes_jp2_tif_ecw_list == []:

                    # Cas ou chaque emprise d'image est un polygone
                    if is_not_assembled or is_optimize_emprise or is_optimize_emprise_nodata:

                        for imagette in imagettes_jp2_tif_ecw_list:
                            # Récupération des emprises de l'image
                            path_image = repertory + os.sep + imagette
                            path_info_acquisition = repertory
                            xmin, xmax, ymin, ymax = getEmpriseImage(
                                path_image)
                            coord_list = [
                                xmin, ymax, xmax, ymax, xmax, ymin, xmin, ymin,
                                xmin, ymax
                            ]
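                            # L'emprise est décrite comme un anneau fermé : (xmin,ymax) -> (xmax,ymax) -> (xmax,ymin) -> (xmin,ymin) -> retour au point de départ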

                            # Saisie des données
                            points_list.append(coord_list)

                            # Récupération du nom de l'image pour la création des champs
                            input_image_name = os.path.splitext(
                                os.path.basename(path_image))[0]
                            name_image_list.append(input_image_name)

                            # Cas optimisation de l'emprise en enlevant les nodata
                            if is_optimize_emprise_nodata:

                                path_info_acquisition = path_image
                                optimize_emprise_nodata_shape = repertory_output + os.sep + input_image_name + extension_vector
                                optimize_emprise_tmp1_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    1) + extension_vector
                                optimize_emprise_tmp2_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    2) + extension_vector
                                optimize_emprise_tmp3_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    3) + extension_vector
                                optimize_emprise_tmp4_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    4) + extension_vector
                                binary_mask_zeros_raster = repertory_output + os.sep + input_image_name + SUFFIX_MASK_ZERO + extension_raster
                                optimize_emprise_nodata_shape_list.append(
                                    optimize_emprise_nodata_shape)

                                # Création masque binaire pour séparer les no data des vraies valeurs
                                no_data_value_img = getNodataValueImage(
                                    path_image)
                                if no_data_value_img is None:
                                    no_data_value_img = no_data_value
                                createBinaryMaskMultiBand(
                                    path_image, binary_mask_zeros_raster,
                                    no_data_value_img, CODAGE_8B)

                                # Vectorisation du masque binaire true data/false data -> polygone avec uniquement les vraies valeurs
                                if os.path.exists(
                                        optimize_emprise_nodata_shape):
                                    removeVectorFile(
                                        optimize_emprise_nodata_shape)

                                polygonizeRaster(binary_mask_zeros_raster,
                                                 optimize_emprise_tmp1_shape,
                                                 input_image_name,
                                                 ATTR_NAME_ID, format_vector)

                                # Nettoyage des polygones parasites pour ne garder que le polygone principal si l'option "all" n'est pas demandée
                                if not is_all_polygons_used:
                                    geometry_list = getGeomPolygons(
                                        optimize_emprise_tmp1_shape, None,
                                        None, format_vector)
                                    geometry_ordered_dico = {}
                                    geometry_ordered_list = []
                                    for geometry in geometry_list:
                                        area = geometry.GetArea()
                                        geometry_ordered_dico[area] = geometry
                                        geometry_ordered_list.append(area)
                                    geometry_ordered_list.sort()
                                    if len(geometry_ordered_list) > 0:
                                        max_area = geometry_ordered_list[-1]
                                        geom_max = geometry_ordered_dico[max_area]
                                        attribute_dico = {
                                            ATTR_NAME_ID: ogr.OFTInteger
                                        }
                                        polygons_attr_geom_dico = {}
                                        polygons_attr_geom_dico[str(1)] = [
                                            geom_max, {
                                                ATTR_NAME_ID: str(1)
                                            }
                                        ]
                                        createPolygonsFromGeometryList(
                                            attribute_dico,
                                            polygons_attr_geom_dico,
                                            optimize_emprise_tmp2_shape, epsg,
                                            format_vector)
                                    else:
                                        print(
                                            cyan + "createEmprise() : " +
                                            bold + yellow +
                                            " Attention !!! Fichier non traité (ne contient pas de polygone) : "
                                            + optimize_emprise_tmp1_shape +
                                            endC)
                                        optimize_emprise_tmp2_shape = optimize_emprise_tmp1_shape
                                else:
                                    optimize_emprise_tmp2_shape = optimize_emprise_tmp1_shape

                                # Clean the polygons: simplification and removal of holes
                                cleanRingVector(optimize_emprise_tmp2_shape,
                                                optimize_emprise_tmp3_shape,
                                                format_vector)
                                simplifyVector(optimize_emprise_tmp3_shape,
                                               optimize_emprise_tmp4_shape, 2,
                                               format_vector)
                                if size_erode != 0.0:
                                    bufferVector(
                                        optimize_emprise_tmp4_shape,
                                        optimize_emprise_nodata_shape,
                                        size_erode * -1, "", 1.0, 10,
                                        format_vector)
                                else:
                                    copyVectorFile(
                                        optimize_emprise_tmp4_shape,
                                        optimize_emprise_nodata_shape,
                                        format_vector)

                                # Clean up intermediate files
                                if not save_results_intermediate:
                                    removeFile(binary_mask_zeros_raster)
                                    removeVectorFile(
                                        optimize_emprise_tmp1_shape)
                                    removeVectorFile(
                                        optimize_emprise_tmp2_shape)
                                    removeVectorFile(
                                        optimize_emprise_tmp3_shape)
                                    removeVectorFile(
                                        optimize_emprise_tmp4_shape)

                            # Retrieve the acquisition date and time
                            # For the nodata-optimized footprint, the acquisition date comes from the image name; otherwise, from the directory name
                            getDataToFiels(
                                path_info_acquisition, is_not_date,
                                is_optimize_emprise
                                or is_optimize_emprise_nodata, separ_name,
                                pos_date, nb_char_date, separ_date,
                                points_list, ref_dossier_list, name_rep_list,
                                date_list, heure_list)

                    # Case where the global footprint of the images is used: a single polygon covering the overall extent
                    else:

                        # Retrieve the footprints of the images in the directory
                        liste_x_l = []
                        liste_y_b = []
                        liste_x_r = []
                        liste_y_t = []

                        for imagette in imagettes_jp2_tif_ecw_list:
                            path_image = repertory + os.sep + imagette
                            xmin, xmax, ymin, ymax = getEmpriseImage(
                                path_image)

                            liste_x_l.append(xmin)
                            liste_x_r.append(xmax)
                            liste_y_b.append(ymin)
                            liste_y_t.append(ymax)

                        # Compute the min and max over the list of image tiles
                        # Upper-left corner
                        xmin_l_t = str(min(liste_x_l))

                        # Lower-left corner
                        ymin_l_b = str(min(liste_y_b))
                        xmin_l_b = xmin_l_t

                        # Lower-right corner
                        xmax_r_b = str(max(liste_x_r))

                        # Upper-right corner
                        ymax_r_t = str(max(liste_y_t))
                        xmax_r_t = xmax_r_b
                        ymax_r_b = ymin_l_b
                        ymin_l_t = ymax_r_t

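                        # Closed ring of the global bounding box, as (x, y) string pairs:
                        # upper-left, lower-left, lower-right, upper-right, back to upper-left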
                        coord_list = [
                            xmin_l_t, ymin_l_t, xmin_l_b, ymin_l_b, xmax_r_b,
                            ymax_r_b, xmax_r_t, ymax_r_t, xmin_l_t, ymin_l_t
                        ]
                        points_list.append(coord_list)

                        # Use the directory name to fill the attribute fields
                        getDataToFiels(repertory, is_not_date,
                                       is_optimize_emprise, separ_name,
                                       pos_date, nb_char_date, separ_date,
                                       points_list, ref_dossier_list,
                                       name_rep_list, date_list, heure_list)

        # Prepare attribute_dico and polygons_attr_coord_dico
        if is_not_assembled:
            attribute_dico = {
                ATTR_NAME_ID: ogr.OFTInteger,
                ATTR_NAME_NOMIMAGE: ogr.OFTString,
                ATTR_NAME_DATEACQUI: ogr.OFTDate,
                ATTR_NAME_HEUREACQUI: ogr.OFTString
            }

            for i in range(len(points_list)):
                polygons_attr_coord_dico[str(i)] = [
                    points_list[i], {
                        ATTR_NAME_ID: i + 1,
                        ATTR_NAME_NOMIMAGE: name_image_list[i],
                        ATTR_NAME_DATEACQUI: date_list[i],
                        ATTR_NAME_HEUREACQUI: heure_list[i]
                    }
                ]

        else:
            attribute_dico = {
                ATTR_NAME_NOMIMAGE: ogr.OFTString,
                ATTR_NAME_REFDOSSIER: ogr.OFTString,
                ATTR_NAME_DATEACQUI: ogr.OFTDate,
                ATTR_NAME_HEUREACQUI: ogr.OFTString
            }

            for i in range(len(points_list)):
                polygons_attr_coord_dico[str(i)] = [
                    points_list[i], {
                        ATTR_NAME_NOMIMAGE: name_rep_list[i],
                        ATTR_NAME_REFDOSSIER: ref_dossier_list[i],
                        ATTR_NAME_DATEACQUI: date_list[i],
                        ATTR_NAME_HEUREACQUI: heure_list[i]
                    }
                ]

        # Case: footprint optimized by removing the nodata
        if is_optimize_emprise_nodata:

            if is_not_assembled:
                file_vector = output_file
            else:
                file_vector = file_vector_detail

            # Merge the nodata-optimized image footprint polygons
            polygons_attr_geom_dico = {}
            i = 0
            for shape_file in optimize_emprise_nodata_shape_list:
                geom_list = getGeomPolygons(shape_file, ATTR_NAME_ID, 1,
                                            format_vector)
                if not is_all_polygons_used:
                    if geom_list is not None and len(geom_list) > 0:
                        geom = geom_list[0]
                        polygons_attr_geom_dico[str(i)] = [
                            geom, polygons_attr_coord_dico[str(i)][1]
                        ]
                else:
                    j = 1
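                    # Offset the key by 1000000 * j so several polygons from the
                    # same image index i receive distinct dictionary keys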
                    for geom in geom_list:
                        polygons_attr_geom_dico[str(i + 1000000 * j)] = [
                            geom, polygons_attr_coord_dico[str(i)][1]
                        ]
                        j += 1
                i += 1

            createPolygonsFromGeometryList(attribute_dico,
                                           polygons_attr_geom_dico,
                                           file_vector, epsg, format_vector)

            # Remove intermediate files
            if not save_results_intermediate:
                for vector_to_del in optimize_emprise_nodata_shape_list:
                    removeVectorFile(vector_to_del)

        else:
            # Use createPolygonsFromCoordList()
            if is_optimize_emprise:
                file_vector = file_vector_detail
            else:
                file_vector = output_file

            # Create the polygons from the list of footprint coordinates
            createPolygonsFromCoordList(attribute_dico,
                                        polygons_attr_coord_dico, file_vector,
                                        epsg, format_vector)

        # Case: merge the polygons so the footprint consists of a single polygon
        if not is_not_assembled:
            if is_optimize_emprise or is_optimize_emprise_nodata or is_all_polygons_used:
                column_name = ""
                if is_all_polygons_used:
                    column_name = ATTR_NAME_DATEACQUI
                elif is_optimize_emprise or is_optimize_emprise_nodata:
                    column_name = ATTR_NAME_NOMIMAGE

                # Merge the polygons
                if is_all_polygons_used and is_not_date:
                    fusionNeighbourPolygonsBySameValue(file_vector,
                                                       output_file,
                                                       column_name,
                                                       format_vector)
                    #dissolveVector(file_vector, output_file, column_name, format_vector)
                else:
                    if not geometries2multigeometries(file_vector, output_file,
                                                      column_name,
                                                      format_vector):
                        copyVectorFile(file_vector, output_file, format_vector)

                # Remove intermediate files
                if not save_results_intermediate:
                    removeVectorFile(file_vector_detail)

    return
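
The main-polygon filter above keeps only the largest footprint polygon. A minimal standalone sketch of the same selection (hypothetical helper name; geometry_list holds OGR geometries such as those returned by getGeomPolygons()):

def largestPolygon(geometry_list):
    # Return the geometry with the largest area, or None for an empty list
    if not geometry_list:
        return None
    return max(geometry_list, key=lambda geometry: geometry.GetArea())
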
Exemplo n.º 13
0
def skyViewFactor(grid_input,
                  grid_output,
                  mns_input,
                  classif_input,
                  class_build_list,
                  dim_grid_x,
                  dim_grid_y,
                  svf_radius,
                  svf_method,
                  svf_dlevel,
                  svf_ndirs,
                  epsg,
                  no_data_value,
                  path_time_log,
                  format_raster='GTiff',
                  format_vector='ESRI Shapefile',
                  extension_raster=".tif",
                  extension_vector=".shp",
                  save_results_intermediate=False,
                  overwrite=True):

    print(bold + yellow + "Début du calcul de l'indicateur Sky View Factor." +
          endC + "\n")
    timeLine(path_time_log,
             "Début du calcul de l'indicateur Sky View Factor : ")

    if debug >= 3:
        print(bold + green + "skyViewFactor() : Variables dans la fonction" +
              endC)
        print(cyan + "skyViewFactor() : " + endC + "grid_input : " +
              str(grid_input) + endC)
        print(cyan + "skyViewFactor() : " + endC + "grid_output : " +
              str(grid_output) + endC)
        print(cyan + "skyViewFactor() : " + endC + "mns_input : " +
              str(mns_input) + endC)
        print(cyan + "skyViewFactor() : " + endC + "class_build_list : " +
              str(class_build_list) + endC)
        print(cyan + "skyViewFactor() : " + endC + "classif_input : " +
              str(classif_input) + endC)
        print(cyan + "skyViewFactor() : " + endC + "dim_grid_x : " +
              str(dim_grid_x) + endC)
        print(cyan + "skyViewFactor() : " + endC + "dim_grid_y : " +
              str(dim_grid_y) + endC)
        print(cyan + "skyViewFactor() : " + endC + "svf_radius : " +
              str(svf_radius) + endC)
        print(cyan + "skyViewFactor() : " + endC + "svf_method : " +
              str(svf_method) + endC)
        print(cyan + "skyViewFactor() : " + endC + "svf_dlevel : " +
              str(svf_dlevel) + endC)
        print(cyan + "skyViewFactor() : " + endC + "svf_ndirs : " +
              str(svf_ndirs) + endC)
        print(cyan + "skyViewFactor() : " + endC + "epsg : " + str(epsg) +
              endC)
        print(cyan + "skyViewFactor() : " + endC + "no_data_value : " +
              str(no_data_value) + endC)
        print(cyan + "skyViewFactor() : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "skyViewFactor() : " + endC + "format_raster : " +
              str(format_raster) + endC)
        print(cyan + "skyViewFactor() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "skyViewFactor() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "skyViewFactor() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "skyViewFactor() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "skyViewFactor() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # File-related constants
    SKY_VIEW_FIELD = 'SkyView'

    BASE_FILE_TILE = 'tile_'
    BASE_FILE_POLY = 'poly_'
    SUFFIX_VECTOR_BUFF = '_buff'
    SUFFIX_VECTOR_TEMP = '_temp'

    # Directory-tree constants
    FOLDER_SHP = 'SHP'
    FOLDER_SGRD = 'SGRD'
    FOLDER_TIF = 'TIF'
    SUB_FOLDER_DEM = 'DEM'
    SUB_FOLDER_SVF = 'SVF'
    SUB_SUB_FOLDER_BUF = 'BUF'

    if not os.path.exists(grid_output) or overwrite:

        #############################################
        ### General preparation of the processing ###
        #############################################

        if os.path.exists(grid_output):
            removeVectorFile(grid_output)

        temp_path = os.path.dirname(grid_output) + os.sep + "SkyViewFactor"
        sky_view_factor_raster = temp_path + os.sep + "sky_view_factor" + extension_raster

        cleanTempData(temp_path)
        os.makedirs(temp_path + os.sep + FOLDER_SHP)
        os.makedirs(temp_path + os.sep + FOLDER_SGRD + os.sep + SUB_FOLDER_DEM)
        os.makedirs(temp_path + os.sep + FOLDER_SGRD + os.sep + SUB_FOLDER_SVF)
        os.makedirs(temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_DEM)
        os.makedirs(temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_SVF)
        os.makedirs(temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_SVF +
                    os.sep + SUB_SUB_FOLDER_BUF)

        # Retrieve the resolution of the input raster
        pixel_size_x, pixel_size_y = getPixelWidthXYImage(mns_input)
        print(bold + "Taille de pixel du fichier '%s' :" % (mns_input) + endC)
        print("    pixel_size_x = " + str(pixel_size_x))
        print("    pixel_size_y = " + str(pixel_size_y) + "\n")

        #############################################
        ### Creating the footprint and grid files ###
        #############################################

        print(bold + cyan + "Création des fichiers emprise et grille :" + endC)
        timeLine(path_time_log,
                 "    Création des fichiers emprise et grille : ")

        emprise_file = temp_path + os.sep + "emprise" + extension_vector
        quadrillage_file = temp_path + os.sep + "quadrillage" + extension_vector

        # Create the footprint file
        createVectorMask(mns_input, emprise_file, no_data_value, format_vector)

        # Create the grid file
        createGridVector(emprise_file, quadrillage_file, dim_grid_x,
                         dim_grid_y, None, overwrite, epsg, format_vector)

        ##################################################
        ### Extracting and buffering the cutting tiles ###
        ##################################################

        print(bold + cyan + "Extraction des carreaux de découpage :" + endC)
        timeLine(path_time_log, "    Extraction des carreaux de découpage : ")

        split_tile_vector_list = splitVector(quadrillage_file,
                                             temp_path + os.sep + FOLDER_SHP,
                                             "", epsg, format_vector,
                                             extension_vector)

        split_tile_vector_buff_list = []

        # Loop over the grid polygon files
        for split_tile_vector in split_tile_vector_list:
            repertory_temp_output = os.path.dirname(split_tile_vector)
            base_name = os.path.splitext(
                os.path.basename(split_tile_vector))[0]
            split_tile_buff_vector = repertory_temp_output + os.sep + base_name + SUFFIX_VECTOR_BUFF + extension_vector
            split_tile_buff_vector_temp = repertory_temp_output + os.sep + base_name + SUFFIX_VECTOR_BUFF + SUFFIX_VECTOR_TEMP + extension_vector

            # Buffering (by the SVF search radius)
            bufferVector(split_tile_vector, split_tile_buff_vector_temp,
                         svf_radius, "", 1.0, 10, format_vector)

            # Re-cut with the footprint if the tile intersects it
            if cutVector(emprise_file, split_tile_buff_vector_temp,
                         split_tile_buff_vector, overwrite, format_vector):
                split_tile_vector_buff_list.append(split_tile_buff_vector)

        #####################################################
        ### Clipping the DSM/DHM to each tile's footprint ###
        #####################################################

        print(bold + cyan + "Découpage du raster en tuiles :" + endC)
        timeLine(path_time_log, "    Découpage du raster en tuiles : ")

        # Loop over the buffered grid polygon files
        for i in range(len(split_tile_vector_list)):
            print("Traitement de la tuile " + str(int(i) + 1) +
                  str(len(split_tile_vector_list)) + "...")

            split_tile_vector = split_tile_vector_list[i]
            repertory_temp_output = os.path.dirname(split_tile_vector)
            base_name = os.path.splitext(
                os.path.basename(split_tile_vector))[0]
            split_tile_buff_vector = repertory_temp_output + os.sep + base_name + SUFFIX_VECTOR_BUFF + extension_vector
            dem_tif_file = temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_DEM + os.sep + BASE_FILE_TILE + str(
                i) + extension_raster

            if os.path.exists(split_tile_buff_vector):
                cutImageByVector(split_tile_buff_vector, mns_input,
                                 dem_tif_file, pixel_size_x, pixel_size_y,
                                 no_data_value, epsg, format_raster,
                                 format_vector)

        ###############################################
        ### Computing the SVF for each DSM/DHM tile ###
        ###############################################

        print(bold + cyan + "Calcul du SVF pour chaque tuile via SAGA :" +
              endC)
        timeLine(path_time_log,
                 "    Calcul du SVF pour chaque tuile via SAGA : ")

        svf_buf_tif_file_list = []

        # Loop over the input raster tiles created from each grid polygon
        for i in range(len(split_tile_vector_list)):
            # Compute the Sky View Factor with SAGA
            dem_tif_file = temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_DEM + os.sep + BASE_FILE_TILE + str(
                i) + extension_raster
            svf_buf_tif_file = temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_SVF + os.sep + SUB_SUB_FOLDER_BUF + os.sep + BASE_FILE_TILE + str(
                i) + extension_raster

            if os.path.exists(dem_tif_file):
                computeSkyViewFactor(dem_tif_file, svf_buf_tif_file,
                                     svf_radius, svf_method, svf_dlevel,
                                     svf_ndirs, save_results_intermediate)
                svf_buf_tif_file_list.append(svf_buf_tif_file)

        ##########################################################
        ### Re-cutting the SVF tiles with the unbuffered tiles ###
        ##########################################################

        print(bold + cyan +
              "Re-découpage des tuiles du SVF avec les tuiles sans tampon :" +
              endC)
        timeLine(
            path_time_log,
            "    Re-découpage des tuiles du SVF avec les tuiles sans tampon : "
        )

        folder_output_svf = temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_SVF + os.sep

        # Loop over the buffered SVF tiles
        for i in range(len(split_tile_vector_list)):
            print("Traitement de la tuile " + str(int(i) + 1) + "/" +
                  str(len(split_tile_vector_list)) + "...")

            split_tile_vector = split_tile_vector_list[i]
            svf_buf_tif_file = temp_path + os.sep + FOLDER_TIF + os.sep + SUB_FOLDER_SVF + os.sep + SUB_SUB_FOLDER_BUF + os.sep + BASE_FILE_TILE + str(
                i) + extension_raster
            base_name = os.path.splitext(os.path.basename(svf_buf_tif_file))[0]
            svf_tif_file = folder_output_svf + os.sep + base_name + extension_raster
            if os.path.exists(svf_buf_tif_file):
                cutImageByVector(split_tile_vector, svf_buf_tif_file,
                                 svf_tif_file, pixel_size_x, pixel_size_y,
                                 no_data_value, epsg, format_raster,
                                 format_vector)

        ##################################################################
        ### Assembling the SVF tiles and computing the final indicator ###
        ##################################################################

        print(
            bold + cyan +
            "Assemblage des tuiles du SVF et calcul de l'indicateur final :" +
            endC)
        timeLine(
            path_time_log,
            "    Assemblage des tuiles du SVF et calcul de l'indicateur final : "
        )

        classif_input_temp = temp_path + os.sep + FOLDER_TIF + os.sep + "classif_input" + SUFFIX_VECTOR_TEMP + extension_raster
        sky_view_factor_temp = temp_path + os.sep + FOLDER_TIF + os.sep + "sky_view_factor" + SUFFIX_VECTOR_TEMP + extension_raster  # Result of assembling the tiles
        sky_view_factor_temp_temp = temp_path + os.sep + FOLDER_TIF + os.sep + "sky_view_factor" + SUFFIX_VECTOR_TEMP + SUFFIX_VECTOR_TEMP + extension_raster  # Result of the re-cut needed to feed BandMath correctly
        sky_view_factor_temp_temp_temp = temp_path + os.sep + FOLDER_TIF + os.sep + "sky_view_factor" + SUFFIX_VECTOR_TEMP + SUFFIX_VECTOR_TEMP + SUFFIX_VECTOR_TEMP + extension_raster  # Result of removing the built-up pixels

        # Assemble the previously created SVF tiles into a single SVF raster
        selectAssembyImagesByHold(
            emprise_file, [folder_output_svf], sky_view_factor_temp, False,
            False, epsg, False, False, False, False, 1.0, pixel_size_x,
            pixel_size_y, no_data_value, "_", 2, 8, "", path_time_log,
            "_error", "_merge", "_clean", "_stack", format_raster,
            format_vector, extension_raster, extension_vector,
            save_results_intermediate, overwrite)

        # Remove the SVF values over buildings
        cutImageByVector(emprise_file, classif_input, classif_input_temp,
                         pixel_size_x, pixel_size_y, no_data_value, epsg,
                         format_raster, format_vector)
        cutImageByVector(emprise_file, sky_view_factor_temp,
                         sky_view_factor_temp_temp, pixel_size_x, pixel_size_y,
                         no_data_value, epsg, format_raster, format_vector)

        expression = ""
        for id_class in class_build_list:
            expression += "im1b1==%s or " % (str(id_class))
        expression = expression[:-4]
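        # e.g. class_build_list = [11, 12] -> expression = "im1b1==11 or im1b1==12"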
        command = "otbcli_BandMath -il %s %s -out %s float -exp '%s ? -1 : im2b1'" % (
            classif_input_temp, sky_view_factor_temp_temp,
            sky_view_factor_temp_temp_temp, expression)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(
                cyan + "skyViewFactor() : " + bold + red +
                "!!! Une erreur s'est produite au cours de la commande otbcli_BandMath : "
                + command + ". Voir message d'erreur." + endC,
                file=sys.stderr)
            raise NameError("skyViewFactor() : An error occurred during the otbcli_BandMath command. See error message above.")

        command = "gdal_translate -a_srs EPSG:%s -a_nodata %s -of %s %s %s" % (
            str(epsg), str(no_data_value), format_raster,
            sky_view_factor_temp_temp_temp, sky_view_factor_raster)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(
                cyan + "skyViewFactor() : " + bold + red +
                "!!! Une erreur s'est produite au cours de la commande gdal_translate : "
                + command + ". Voir message d'erreur." + endC,
                file=sys.stderr)
            raise NameError("skyViewFactor() : An error occurred during the gdal_translate command. See error message above.")

        # Vector-raster cross analysis to retrieve the mean SVF per polygon of the input grid file
        col_to_delete_list = [
            "min", "max", "median", "sum", "std", "unique", "range"
        ]
        statisticsVectorRaster(
            sky_view_factor_raster, grid_input, grid_output, 1, False, False,
            True, col_to_delete_list, [], {}, path_time_log, True,
            format_vector, save_results_intermediate, overwrite
        )  ## NaN values occur for fully built-up polygons (a concern for later steps?)

        renameFieldsVector(grid_output, ['mean'], [SKY_VIEW_FIELD],
                           format_vector)

        #######################################
        ### Cleaning up the temporary files ###
        #######################################

        if not save_results_intermediate:
            deleteDir(temp_path)

    else:
        print(bold + magenta + "Le calcul du Sky View Factor a déjà eu lieu." +
              endC + "\n")

    print(bold + yellow + "Fin du calcul de l'indicateur Sky View Factor." +
          endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Sky View Factor : ")

    return
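
The building-mask expression assembled in skyViewFactor() lends itself to a small helper. A minimal sketch (hypothetical helper name, producing the same OTB BandMath syntax as the loop above):

def buildMaskExpression(class_build_list):
    # "im1b1==C1 or im1b1==C2 or ..." : true for pixels belonging to any building class
    return " or ".join("im1b1==%s" % str(id_class) for id_class in class_build_list)

# buildMaskExpression([11, 12]) returns "im1b1==11 or im1b1==12"
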
def sobelToOuvrages(input_im_seuils_dico, output_dir, input_cut_vector, no_data_value, path_time_log, format_raster='GTiff', format_vector="ESRI Shapefile", extension_raster=".tif", extension_vector=".shp", save_results_intermediate=True, overwrite=True):

    # Constants
    REPERTORY_TEMP = "temp_sobel"
    CODAGE_8B = "uint8"
    ID = "id"

    # Update the log
    starting_event = "sobelToOuvrages() : Select Sobel to ouvrages starting : "
    timeLine(path_time_log,starting_event)

    # Create the output directory if it does not already exist
    if not os.path.exists(output_dir + os.sep + REPERTORY_TEMP):
        os.makedirs(output_dir + os.sep + REPERTORY_TEMP)

    # Display the parameters
    if debug >= 3:
        print(bold + green + "Variables dans SobelToOuvrages - Variables générales" + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "input_im_seuils_dico : " + str(input_im_seuils_dico) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "output_dir : " + str(output_dir) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "input_cut_vector : " + str(input_cut_vector) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "sobelToOuvrages() : " + endC + "overwrite : " + str(overwrite) + endC)

    sobel_ouvrages_shp_list = []

    for elt in input_im_seuils_dico.split():
        raw_image = elt.split(":")[0]
        sobel_image = elt.split(":")[1].split(",")[0]

        for i in range(1,len(elt.split(":")[1].split(","))):
            seuil = elt.split(":")[1].split(",")[i]

            # Initialize the output file names
            image_name = os.path.splitext(os.path.basename(raw_image))[0]
            sobel_binary_mask = output_dir + os.sep + REPERTORY_TEMP + os.sep + "bin_mask_sobel_" + image_name + "_" + str(seuil) + extension_raster
            sobel_binary_mask_vector_name = "bin_mask_vect_sobel_" + image_name + "_" + str(seuil)
            sobel_binary_mask_vector = output_dir + os.sep + REPERTORY_TEMP + os.sep + sobel_binary_mask_vector_name + extension_vector
            sobel_binary_mask_vector_cleaned = output_dir + os.sep + REPERTORY_TEMP + os.sep + "bin_mask_vect_sobel_cleaned_" + image_name + "_" + str(seuil) + extension_vector
            sobel_decoup = output_dir + os.sep + "sobel_decoup_" + image_name + "_" + str(seuil) + extension_vector

            binary_mask_zeros_name = "b_mask_zeros_vect_" + image_name
            binary_mask_zeros_raster = output_dir + os.sep + REPERTORY_TEMP + os.sep + "b_mask_zeros_" + image_name + extension_raster
            binary_mask_zeros_vector = output_dir + os.sep + REPERTORY_TEMP + os.sep + binary_mask_zeros_name + extension_vector
            binary_mask_zeros_vector_simpl = output_dir + os.sep + REPERTORY_TEMP + os.sep + "b_mask_zeros_vect_simpl_" + image_name + extension_vector
            true_values_buffneg = output_dir + os.sep + REPERTORY_TEMP + os.sep + "true_values_buffneg_" + image_name + extension_vector
            ouvrages_decoup_final = output_dir + os.sep + "ouvrages_sobel_" + image_name + "_" + str(seuil) + extension_vector

            # Create the binary mask
            createBinaryMask(sobel_image, sobel_binary_mask, float(seuil), True)

            # Cut the binary mask with the input cutting shapefile
            cutImageByVector(input_cut_vector, sobel_binary_mask, sobel_decoup, None, None, no_data_value, 0, format_raster, format_vector)

            # Vectorize the cut Sobel binary mask
            polygonizeRaster(sobel_decoup, sobel_binary_mask_vector, sobel_binary_mask_vector_name)

            # Binary mask to separate the no data from the true values
            nodata_value = getNodataValueImage(raw_image)
            if no_data_value is None:
                no_data_value = nodata_value if nodata_value is not None else 0
            createBinaryMaskMultiBand(raw_image, binary_mask_zeros_raster, no_data_value, CODAGE_8B)

            # Vectorize the true data / false data binary mask -> polygon with only the true values
            if os.path.exists(binary_mask_zeros_vector):
                removeVectorFile(binary_mask_zeros_vector, format_vector)

            # Polygonize
            polygonizeRaster(binary_mask_zeros_raster, binary_mask_zeros_vector, binary_mask_zeros_name, ID, format_vector)

            # Simplify the resulting mask
            simplifyVector(binary_mask_zeros_vector, binary_mask_zeros_vector_simpl, 2, format_vector)

            # Negative buffer on this polygon
            bufferVector(binary_mask_zeros_vector_simpl, true_values_buffneg, -2, "", 1.0, 10, format_vector)
            cleanMiniAreaPolygons(sobel_binary_mask_vector, sobel_binary_mask_vector_cleaned, 15, ID, format_vector)

            # Cut with the negative buffer around the true data
            cutVectorAll(true_values_buffneg, sobel_binary_mask_vector_cleaned, ouvrages_decoup_final, overwrite, format_vector)
            sobel_ouvrages_shp_list.append(ouvrages_decoup_final)

    return sobel_ouvrages_shp_list
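
The input_im_seuils_dico parameter is a whitespace-separated string of "raw_image:sobel_image,threshold1,threshold2,..." entries, as the chained split() calls above imply. A minimal parsing sketch (hypothetical helper name, same format assumption):

def parseImSeuilsDico(input_im_seuils_dico):
    # Return {raw_image: (sobel_image, [threshold, ...])}
    dico = {}
    for elt in input_im_seuils_dico.split():
        raw_image, values = elt.split(":", 1)
        parts = values.split(",")
        dico[raw_image] = (parts[0], parts[1:])
    return dico
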
def populationVulnerability(
        input_division,
        input_footprint,
        input_population,
        input_built,
        output_vulnerability,
        id_div='id',
        id_pop='IdINSPIRE',
        id_blt='ID',
        stake_field='Ind',
        health_vuln_field_list=['Ind_0_3', 'Ind_4_5', 'Ind_65_79', 'Ind_80p'],
        social_vuln_field_list=['Men_pauv'],
        height_field='HAUTEUR',
        built_sql_filter="NATURE LIKE 'Indiff%renci%e' AND (USAGE1 LIKE 'Indiff%renci%e' OR USAGE1 LIKE 'R%sidentiel' OR USAGE2 LIKE 'R%sidentiel' OR USAGE2 IS NULL) AND HAUTEUR IS NOT NULL AND ST_Area(geom) >= 20",
        epsg=2154,
        format_vector='ESRI Shapefile',
        postgis_ip_host='localhost',
        postgis_num_port=5432,
        postgis_user_name='postgres',
        postgis_password='******',
        postgis_database_name='uhi_vuln',
        postgis_schema_name='public',
        postgis_encoding='UTF-8',
        path_time_log='',
        save_results_intermediate=False,
        overwrite=True):

    if debug >= 3:
        print('\n' + bold + green +
              "Vulnérabilité des populations - Variables dans la fonction :" +
              endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "input_division : " + str(input_division) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "input_footprint : " + str(input_footprint) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "input_population : " + str(input_population) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "input_built : " + str(input_built) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "output_vulnerability : " + str(output_vulnerability) + endC)
        print(cyan + "    populationVulnerability() : " + endC + "id_div : " +
              str(id_div) + endC)
        print(cyan + "    populationVulnerability() : " + endC + "id_pop : " +
              str(id_pop) + endC)
        print(cyan + "    populationVulnerability() : " + endC + "id_blt : " +
              str(id_blt) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "stake_field : " + str(stake_field) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "health_vuln_field_list : " + str(health_vuln_field_list) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "social_vuln_field_list : " + str(social_vuln_field_list) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "height_field : " + str(height_field) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "built_sql_filter : " + str(built_sql_filter) + endC)
        print(cyan + "    populationVulnerability() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "postgis_password : "******"    populationVulnerability() : " + endC +
              "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "    populationVulnerability() : " + endC +
              "overwrite : " + str(overwrite) + endC + '\n')

    # Define the constants
    PREFIX_STAND = 'S_'
    STAKE_IND_FIELD = 'enjeu'
    HEALTH_VULN_IND_FIELD = 'vuln_san'
    SOCIAL_VULN_IND_FIELD = 'vuln_soc'
    GLOBAL_VULN_IND_FIELD = 'vuln_glo'

    # Update the log
    starting_event = "populationVulnerability() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "populationVulnerability() : " + bold + green +
          "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Define the PostGIS table/field variables
    div_table = 'i_division'
    ftp_table = 'i_footprint'
    pop_table = 'i_population'
    blt_table = 'i_built'
    vuln_table = 'o_vulnerability'
    clean_pop_table = 't_clean_pop'
    clean_blt_table = 't_clean_blt'
    inter_pop_blt_table = 't_inter_pop_blt'
    inter_popblt_div_table = 't_inter_popblt_div'
    div_no_pop_table = 't_div_with_no_pop'
    built_area_field = 'surf_bati'
    floor_area_field = 'surf_habit'
    inter_area_field = 'surf_inter'

    # Define miscellaneous variables
    pop_fields_to_treat = [stake_field] + health_vuln_field_list + social_vuln_field_list
    pop_fields_to_keep = [id_pop] + pop_fields_to_treat
    blt_fields_to_keep = [id_blt] + [height_field]
    pop_fields_to_treat_str = stake_field
    for health_vuln_field in health_vuln_field_list:
        pop_fields_to_treat_str += ', ' + health_vuln_field
    for social_vuln_field in social_vuln_field_list:
        pop_fields_to_treat_str += ', ' + social_vuln_field

    # Clean up previous runs
    if overwrite:
        if debug >= 3:
            print(cyan + "populationVulnerability() : " + endC +
                  "Nettoyage des traitements précédents." + endC + '\n')
        removeVectorFile(output_vulnerability, format_vector=format_vector)
        dropDatabase(postgis_database_name,
                     user_name=postgis_user_name,
                     password=postgis_password,
                     ip_host=postgis_ip_host,
                     num_port=postgis_num_port,
                     schema_name=postgis_schema_name)
    else:
        if os.path.exists(output_vulnerability):
            print(cyan + "populationVulnerability() : " + bold + yellow +
                  "Le fichier de sortie existe déjà et ne sera pas regénéré." +
                  endC + '\n')
            raise NameError("populationVulnerability() : Output file already exists and will not be regenerated.")

    ####################################################################

    #############
    # Step 1/5 # Prepare the PostGIS database
    #############

    print(
        cyan + "populationVulnerability() : " + bold + green +
        "ETAPE 1/5 - Début de la préparation de la base de données PostGIS." +
        endC + '\n')

    createDatabase(postgis_database_name,
                   user_name=postgis_user_name,
                   password=postgis_password,
                   ip_host=postgis_ip_host,
                   num_port=postgis_num_port,
                   schema_name=postgis_schema_name)
    div_table = importVectorByOgr2ogr(postgis_database_name,
                                      input_division,
                                      div_table,
                                      user_name=postgis_user_name,
                                      password=postgis_password,
                                      ip_host=postgis_ip_host,
                                      num_port=postgis_num_port,
                                      schema_name=postgis_schema_name,
                                      epsg=epsg,
                                      codage=postgis_encoding)
    ftp_table = importVectorByOgr2ogr(postgis_database_name,
                                      input_footprint,
                                      ftp_table,
                                      user_name=postgis_user_name,
                                      password=postgis_password,
                                      ip_host=postgis_ip_host,
                                      num_port=postgis_num_port,
                                      schema_name=postgis_schema_name,
                                      epsg=epsg,
                                      codage=postgis_encoding)
    pop_table = importVectorByOgr2ogr(postgis_database_name,
                                      input_population,
                                      pop_table,
                                      user_name=postgis_user_name,
                                      password=postgis_password,
                                      ip_host=postgis_ip_host,
                                      num_port=postgis_num_port,
                                      schema_name=postgis_schema_name,
                                      epsg=epsg,
                                      codage=postgis_encoding)
    blt_table = importVectorByOgr2ogr(postgis_database_name,
                                      input_built,
                                      blt_table,
                                      user_name=postgis_user_name,
                                      password=postgis_password,
                                      ip_host=postgis_ip_host,
                                      num_port=postgis_num_port,
                                      schema_name=postgis_schema_name,
                                      epsg=epsg,
                                      codage=postgis_encoding)
    connection = openConnection(postgis_database_name,
                                user_name=postgis_user_name,
                                password=postgis_password,
                                ip_host=postgis_ip_host,
                                num_port=postgis_num_port,
                                schema_name=postgis_schema_name)
    cursor = connection.cursor()
    query = "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        div_table, div_table)
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        ftp_table, ftp_table)
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        pop_table, pop_table)
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        blt_table, blt_table)
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 1/5 - Fin de la préparation de la base de données PostGIS." +
          endC + '\n')

    #############
    # Step 2/5 # Prepare the data
    #############

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 2/5 - Début de la préparation des données." + endC + '\n')

    # Prepare the building data: select within the study area + remove non-residential buildings + update the height field + add a habitable floor area field
    select_query = ""
    for blt_field in blt_fields_to_keep:
        select_query += "b.%s, " % blt_field
    query = "DROP TABLE IF EXISTS %s;\n" % clean_blt_table
    query += "CREATE TABLE %s AS\n" % clean_blt_table
    query += "    SELECT %s, b.geom\n" % select_query[:-2]
    query += "    FROM (\n"
    query += "        SELECT *\n"
    query += "        FROM %s\n" % blt_table
    query += "        WHERE %s\n" % built_sql_filter
    query += "        ) AS b, %s AS f\n" % ftp_table
    query += "    WHERE ST_Intersects(b.geom, f.geom);\n"
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        clean_blt_table, clean_blt_table)
    query += "UPDATE %s SET %s = 3 WHERE %s < 3;\n" % (
        clean_blt_table, height_field, height_field)
    query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6);\n" % (
        clean_blt_table, built_area_field)
    query += "UPDATE %s SET %s = ST_Area(geom);\n" % (clean_blt_table,
                                                      built_area_field)
    query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6);\n" % (
        clean_blt_table, floor_area_field)
    query += "UPDATE %s SET %s = ((%s * %s) / 3);\n" % (
        clean_blt_table, floor_area_field, height_field, built_area_field)
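    # surf_habit = (HAUTEUR * surf_bati) / 3 : the habitable floor area assumes one storey per 3 m of height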
    if debug >= 3:
        print(query)
    executeQuery(connection, query)
    blt_fields_to_keep.append(built_area_field)
    blt_fields_to_keep.append(floor_area_field)

    # Prepare the population data: select within the study area + clean up the fields
    select_query = ""
    for pop_field in pop_fields_to_keep:
        select_query += "p.%s, " % pop_field
    query = "DROP TABLE IF EXISTS %s;\n" % clean_pop_table
    query += "CREATE TABLE %s AS\n" % clean_pop_table
    query += "    SELECT %s, p.geom\n" % select_query[:-2]
    query += "    FROM %s AS p, %s AS f\n" % (pop_table, ftp_table)
    query += "    WHERE ST_Intersects(p.geom, f.geom);\n"
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        clean_pop_table, clean_pop_table)
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 2/5 - Fin de la préparation des données." + endC + '\n')

    #############
    # Step 3/5 # Intersect the data
    #############

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 3/5 - Début de l'intersect de données." + endC + '\n')

    # Compute the population prorata per building
    select_query = ""
    for blt_field in blt_fields_to_keep:
        select_query += "b.%s, " % blt_field
    for pop_field in pop_fields_to_keep:
        select_query += "p.%s, " % pop_field
    query = "DROP TABLE IF EXISTS %s;\n" % inter_pop_blt_table
    query += "CREATE TABLE %s AS\n" % inter_pop_blt_table
    query += "    SELECT %s, ST_Intersection(b.geom, p.geom) AS geom\n" % select_query[:
                                                                                       -2]
    query += "    FROM %s AS b, %s AS p\n" % (clean_blt_table, clean_pop_table)
    query += "    WHERE ST_Intersects(b.geom, p.geom);\n"
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        inter_pop_blt_table, inter_pop_blt_table)
    query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6);\n" % (
        inter_pop_blt_table, inter_area_field)
    query += "UPDATE %s SET %s = ST_Area(geom);\n" % (inter_pop_blt_table,
                                                      inter_area_field)
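    # Prorate each population field by the intersected share of the building footprint: pop_field * (surf_inter / surf_bati)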
    for pop_field in pop_fields_to_treat:
        query += "UPDATE %s SET %s = (%s * (%s / %s));\n" % (
            inter_pop_blt_table, pop_field, pop_field, inter_area_field,
            built_area_field)
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    # Sum the population per division polygon
    select_query = "SUM(i.%s) AS %s, " % (floor_area_field, floor_area_field)
    for pop_field in pop_fields_to_treat:
        select_query += "SUM(i.%s) AS %s, " % (pop_field, pop_field)
    query = "DROP TABLE IF EXISTS %s;\n" % inter_popblt_div_table
    query += "CREATE TABLE %s AS\n" % inter_popblt_div_table
    query += "    SELECT d.%s, %s, d.geom\n" % (id_div, select_query[:-2])
    query += "    FROM %s AS d, %s AS i\n" % (div_table, inter_pop_blt_table)
    query += "    WHERE ST_Intersects(d.geom, i.geom)\n"
    query += "    GROUP BY d.%s, d.geom;\n" % id_div
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        inter_popblt_div_table, inter_popblt_div_table)
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 3/5 - Fin de l'intersect de données." + endC + '\n')

    #############
    # Step 4/5 # Prepare the final file
    #############

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 4/5 - Début de la préparation du fichier final." + endC +
          '\n')

    # New table with the geometries that have no population data
    query = "DROP TABLE IF EXISTS %s;\n" % div_no_pop_table
    query += "CREATE TABLE %s AS\n" % div_no_pop_table
    query += "    SELECT DISTINCT %s, geom\n" % id_div
    query += "    FROM %s\n" % div_table
    query += "    WHERE %s NOT IN\n" % id_div
    query += "        (SELECT DISTINCT %s\n" % id_div
    query += "        FROM %s);\n" % inter_popblt_div_table
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        div_no_pop_table, div_no_pop_table)
    query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(12,6) DEFAULT 0;\n" % (
        div_no_pop_table, floor_area_field)
    for pop_field in pop_fields_to_treat:
        query += "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6) DEFAULT 0;\n" % (
            div_no_pop_table, pop_field)
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    # Union of tables to rebuild the final table
    query = "DROP TABLE IF EXISTS %s;\n" % vuln_table
    query += "CREATE TABLE %s AS\n" % vuln_table
    query += "    SELECT %s, %s, %s, geom\n" % (id_div, floor_area_field,
                                                pop_fields_to_treat_str)
    query += "    FROM %s\n" % inter_popblt_div_table
    query += "    UNION\n"
    query += "    SELECT %s, %s, %s, geom\n" % (id_div, floor_area_field,
                                                pop_fields_to_treat_str)
    query += "    FROM %s;\n" % div_no_pop_table
    query += "CREATE INDEX IF NOT EXISTS %s_geom_gist ON %s USING GIST (geom);\n" % (
        vuln_table, vuln_table)
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    # Standardize the data
    for pop_field in pop_fields_to_treat:
        pop_stand_field = PREFIX_STAND + pop_field[:8]
        cursor.execute("SELECT min(%s) FROM %s;" % (pop_field, vuln_table))
        min_value = cursor.fetchone()
        cursor.execute("SELECT max(%s) FROM %s;" % (pop_field, vuln_table))
        max_value = cursor.fetchone()
        pop_stand_calc = "((%s - %s) / (%s - %s))" % (
            pop_field, min_value[0], max_value[0], min_value[0])
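        # Min-max scaling: (x - min) / (max - min) maps the field onto [0, 1]
        # (a constant field, where max == min, would divide by zero)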
        query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % (
            vuln_table, pop_stand_field)
        query += "UPDATE %s SET %s = %s;\n" % (vuln_table, pop_stand_field,
                                               pop_stand_calc)
        if debug >= 3:
            print(query)
        executeQuery(connection, query)

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 4/5 - préparation du fichier final." + endC + '\n')

    #############
    # Step 5/5 # Compute the indicators
    #############

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 5/5 - Début du calcul des indicateurs." + endC + '\n')

    # Compute the stake index
    stake_stand_field = PREFIX_STAND + stake_field[:8]
    cursor.execute("SELECT max(%s) FROM %s;" % (stake_stand_field, vuln_table))
    max_enjeu = cursor.fetchone()
    query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % (vuln_table,
                                                              STAKE_IND_FIELD)
    query += "UPDATE %s SET %s = (%s / %s);\n" % (
        vuln_table, STAKE_IND_FIELD, stake_stand_field, max_enjeu[0])
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    # Health vulnerability index
    health_vuln_pop_sum = ""
    for health_vuln_field in health_vuln_field_list:
        pop_stand_field = PREFIX_STAND + health_vuln_field[:8]
        health_vuln_pop_sum += "%s + " % pop_stand_field
    health_vuln_pop_sum = health_vuln_pop_sum[:-3]
    cursor.execute("SELECT max(%s) FROM %s;" %
                   (health_vuln_pop_sum, vuln_table))
    max_health_vuln_pop_sum = cursor.fetchone()
    query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % (
        vuln_table, HEALTH_VULN_IND_FIELD)
    query += "UPDATE %s SET %s = ((%s) / %s);\n" % (
        vuln_table, HEALTH_VULN_IND_FIELD, health_vuln_pop_sum,
        max_health_vuln_pop_sum[0])
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    # Social vulnerability index
    social_vuln_pop_sum = ""
    for social_vuln_field in social_vuln_field_list:
        pop_stand_field = PREFIX_STAND + social_vuln_field[:8]
        social_vuln_pop_sum += "%s + " % pop_stand_field
    social_vuln_pop_sum = social_vuln_pop_sum[:-3]
    cursor.execute("SELECT max(%s) FROM %s;" %
                   (social_vuln_pop_sum, vuln_table))
    max_social_vuln_pop_sum = cursor.fetchone()
    query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % (
        vuln_table, SOCIAL_VULN_IND_FIELD)
    query += "UPDATE %s SET %s = ((%s) / %s);\n" % (
        vuln_table, SOCIAL_VULN_IND_FIELD, social_vuln_pop_sum,
        max_social_vuln_pop_sum[0])
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    # Global vulnerability index
    cursor.execute("SELECT max(%s + %s) FROM %s;" %
                   (HEALTH_VULN_IND_FIELD, SOCIAL_VULN_IND_FIELD, vuln_table))
    max_pop_vuln_glo_sum = cursor.fetchone()
    query = "ALTER TABLE %s ADD COLUMN %s NUMERIC(8,6);\n" % (
        vuln_table, GLOBAL_VULN_IND_FIELD)
    query += "UPDATE %s SET %s = ((%s + %s) / %s);\n" % (
        vuln_table, GLOBAL_VULN_IND_FIELD, HEALTH_VULN_IND_FIELD,
        SOCIAL_VULN_IND_FIELD, max_pop_vuln_glo_sum[0])
    if debug >= 3:
        print(query)
    executeQuery(connection, query)

    print(cyan + "populationVulnerability() : " + bold + green +
          "ETAPE 5/5 - Fin du calcul des indicateurs." + endC + '\n')

    closeConnection(connection)
    exportVectorByOgr2ogr(postgis_database_name,
                          output_vulnerability,
                          vuln_table,
                          user_name=postgis_user_name,
                          password=postgis_password,
                          ip_host=postgis_ip_host,
                          num_port=postgis_num_port,
                          schema_name=postgis_schema_name,
                          format_type=format_vector)

    ####################################################################

    # Remove temporary files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "populationVulnerability() : " + endC +
                  "Suppression des fichiers temporaires." + endC + '\n')
        dropDatabase(postgis_database_name,
                     user_name=postgis_user_name,
                     password=postgis_password,
                     ip_host=postgis_ip_host,
                     num_port=postgis_num_port,
                     schema_name=postgis_schema_name)

    print(cyan + "populationVulnerability() : " + bold + green +
          "FIN DES TRAITEMENTS" + endC + '\n')

    # Update the log
    ending_event = "populationVulnerability() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return 0
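
Each index in step 5/5 follows the same pattern: min-max standardize the contributing fields, sum them, and divide by the maximum of that sum. A minimal sketch of the scaling step in plain Python (hypothetical helper name, assuming max > min, as the SQL above also does):

def minMaxStandardize(values):
    # Linearly map a list of numeric values onto [0, 1]
    v_min, v_max = min(values), max(values)
    return [(v - v_min) / float(v_max - v_min) for v in values]

# minMaxStandardize([2, 4, 6]) returns [0.0, 0.5, 1.0]
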
Exemplo n.º 16
0
        if not enter_with_mask and (indicators_method == "BD_exogenes" or indicators_method == "SI_seuillage"):
            print(bold + "    Seuil de NDVI pour l'extraction de la végétation : " + endC + str(threshold_ndvi))
            if indicators_method == "SI_seuillage":
                print(bold + "    Seuil de NDVI pour l'extraction de l'eau : " + endC + str(threshold_ndvi_water))
                print(bold + "    Seuil de NDWI2 pour l'extraction de l'eau : " + endC + str(threshold_ndwi2))
                print(bold + "    Seuil inférieur du BI pour l'extraction du sol nu : " + endC + str(threshold_bi_bottom))
                print(bold + "    Seuil supérieur du BI pour l'extraction du sol nu : " + endC + str(threshold_bi_top))

        print("\n")

        ####################################################
        ### Miscellaneous preparations before processing ###
        ####################################################

        if os.path.exists(ucz_output):
            removeVectorFile(ucz_output)

        temp_directory = os.path.dirname(ucz_output) + os.sep + "TEMP"

        if os.path.exists(temp_directory):
            print(bold + "Des données temporaires existent, veillez à les conserver si vous reprenez un traitement en cours..." + endC)
            clean_temp = input(bold + blue + "Nettoyer le dossier temporaire ? (Y/N) :" + endC + " ")
            while clean_temp not in ("Y","N"):
                print(bold + yellow + "Attention : veuillez répondre par Y ou N." + endC)
                clean_temp = input(bold + blue + "Nettoyer le dossier temporaire ? (Y/N) :" + endC + " ")
            print("\n")
            if clean_temp == "Y":
                shutil.rmtree(temp_directory)

        if not os.path.exists(temp_directory):
            try:
                os.makedirs(temp_directory)
            except OSError as err:
                print(bold + red + "Impossible de créer le dossier temporaire : " + str(err) + endC)
                raise
def rastersPreparation(emprise_file, classif_input, mns_input, mnh_input, classif_output, mns_output, mnh_output, epsg, no_data_value, path_time_log, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    print(bold + yellow + "Début de la préparation des fichiers rasters.\n" + endC)
    timeLine(path_time_log, "Début de la préparation des fichiers rasters : ")

    if debug >= 3 :
        print(bold + green + "rastersPreparation() : Variables dans la fonction" + endC)
        print(cyan + "rastersPreparation() : " + endC + "emprise_file : " + str(emprise_file) + endC)
        print(cyan + "rastersPreparation() : " + endC + "classif_input : " + str(classif_input) + endC)
        print(cyan + "rastersPreparation() : " + endC + "mns_input : " + str(mns_input) + endC)
        print(cyan + "rastersPreparation() : " + endC + "mnh_input : " + str(mnh_input) + endC)
        print(cyan + "rastersPreparation() : " + endC + "classif_output : " + str(classif_output) + endC)
        print(cyan + "rastersPreparation() : " + endC + "mns_output : " + str(mns_output) + endC)
        print(cyan + "rastersPreparation() : " + endC + "mnh_output : " + str(mnh_output) + endC)
        print(cyan + "rastersPreparation() : " + endC + "epsg : " + str(epsg))
        print(cyan + "rastersPreparation() : " + endC + "no_data_value : " + str(no_data_value))
        print(cyan + "rastersPreparation() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "rastersPreparation() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "rastersPreparation() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "rastersPreparation() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "rastersPreparation() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "rastersPreparation() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "rastersPreparation() : " + endC + "overwrite : " + str(overwrite) + endC)

    SUFFIX_VECTOR_BUFF = '_buff'

    if not os.path.exists(classif_output) or not os.path.exists(mns_output) or not os.path.exists(mnh_output) or overwrite:

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        if not os.path.exists(os.path.dirname(classif_output)):
            os.makedirs(os.path.dirname(classif_output))
        if not os.path.exists(os.path.dirname(mns_output)):
            os.makedirs(os.path.dirname(mns_output))
        if not os.path.exists(os.path.dirname(mnh_output)):
            os.makedirs(os.path.dirname(mnh_output))

        emprise_buffer = os.path.splitext(emprise_file)[0] + SUFFIX_VECTOR_BUFF + extension_vector
        buffer_dist = 10
        bufferVector(emprise_file, emprise_buffer, buffer_dist, "", 1.0, 10, format_vector)

        ###################################
        ### Traitements sur les rasters ###
        ###################################

        images_input_list = [classif_input, mns_input, mnh_input]
        images_output_list = [classif_output, mns_output, mnh_output]

        cutRasterImages(images_input_list, emprise_buffer, images_output_list, 0, 0, epsg, no_data_value, "", False, path_time_log, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)

        ##########################################
        ### Nettoyage des fichiers temporaires ###
        ##########################################

        if not save_results_intermediate:
            if os.path.exists(emprise_buffer):
                removeVectorFile(emprise_buffer)

    else:
        print(bold + magenta + "La préparation des fichiers rasters a déjà eu lieu.\n" + endC)

    print(bold + yellow + "Fin de la préparation des fichiers rasters.\n" + endC)
    timeLine(path_time_log, "Fin de la préparation des fichiers rasters : ")

    return
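
A hypothetical invocation of rastersPreparation(), assuming the helper modules used above are importable and the input files exist; every path below is made up for illustration.

if __name__ == "__main__":
    rastersPreparation(
        emprise_file="/tmp/ucz/emprise.shp",
        classif_input="/tmp/ucz/classif.tif",
        mns_input="/tmp/ucz/mns.tif",
        mnh_input="/tmp/ucz/mnh.tif",
        classif_output="/tmp/ucz/out/classif.tif",
        mns_output="/tmp/ucz/out/mns.tif",
        mnh_output="/tmp/ucz/out/mnh.tif",
        epsg=2154,
        no_data_value=0,
        path_time_log="/tmp/ucz/time.log")
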
Exemplo n.º 18
def heightOfRoughnessElements(grid_input, grid_output, built_input, height_field, id_field, epsg, project_encoding, server_postgis, port_number, user_postgis, password_postgis, database_postgis, schema_postgis, path_time_log, format_vector='ESRI Shapefile', save_results_intermediate=False, overwrite=True):

    print(bold + yellow + "Début du calcul de l'indicateur Height of Roughness Elements." + endC + "\n")
    timeLine(path_time_log, "Début du calcul de l'indicateur Height of Roughness Elements : ")

    if debug >= 3:
        print(bold + green + "heightOfRoughnessElements() : Variables dans la fonction" + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "grid_input : " + str(grid_input) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "grid_output : " + str(grid_output) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "built_input : " + str(built_input) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "height_field : " + str(height_field) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "id_field : " + str(id_field) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "project_encoding : " + str(project_encoding) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "server_postgis : " + str(server_postgis) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "port_number : " + str(port_number) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "user_postgis : " + str(user_postgis) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "password_postgis : " + str(password_postgis) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "database_postgis : " + str(database_postgis) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "schema_postgis : " + str(schema_postgis) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "heightOfRoughnessElements() : " + endC + "overwrite : " + str(overwrite) + endC)
        print("\n")

    if not os.path.exists(grid_output) or overwrite:

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        print(bold + cyan + "Préparation au calcul de Height of Roughness Elements :" + endC)
        timeLine(path_time_log, "    Préparation au calcul de Height of Roughness Elements : ")

        if os.path.exists(grid_output):
            removeVectorFile(grid_output)

        # Création de la base de données PostGIS
        # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Terrain Roughness Class)
        createDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis)

        # Import des fichiers shapes maille et bati dans la base de données PostGIS
        table_name_maille = importVectorByOgr2ogr(database_postgis, grid_input, 'hre_maille', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding)
        table_name_bati = importVectorByOgr2ogr(database_postgis, built_input, 'hre_bati', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding)

        # Gestion de l'ID
        connection = openConnection(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis)
        attr_names_list = getAttributeNameList(built_input, format_vector=format_vector)
        if id_field not in attr_names_list:
            id_field = 'ogc_fid'
            # ~ id_query = "ALTER TABLE %s ADD COLUMN %s SERIAL PRIMARY KEY" % (table_name_bati, id_field)
            # ~ executeQuery(connection, id_query)

        ###############################################
        ### Calcul de l'indicateur par requêtes SQL ###
        ###############################################

        print(bold + cyan + "Calcul de Height of Roughness Elements :" + endC)
        timeLine(path_time_log, "    Calcul de Height of Roughness Elements : ")

        # Création des index spatiaux (accélère les requêtes spatiales)
        query = """
        --CREATE INDEX IF NOT EXISTS maille_geom_gist ON %s USING GIST (geom);
        --CREATE INDEX IF NOT EXISTS bati_geom_gist ON %s USING GIST (geom);
        """ % (table_name_maille, table_name_bati)

        # Intersect entre les tables maille et bâti (pour chaque maille, on récupère le bâti qui intersect)
        query += """
        DROP TABLE IF EXISTS hre_decoup;
        CREATE TABLE hre_decoup AS
            SELECT b.%s as ID, b.%s as hauteur, ST_Intersection(b.geom, m.geom) as geom
            FROM %s as b, %s as m
            WHERE ST_Intersects(b.geom, m.geom)
                AND (ST_GeometryType(b.geom) = 'ST_Polygon' OR ST_GeometryType(b.geom) = 'ST_MultiPolygon')
                AND (ST_GeometryType(m.geom) = 'ST_Polygon' OR ST_GeometryType(m.geom) = 'ST_MultiPolygon');
        CREATE INDEX IF NOT EXISTS decoup_geom_gist ON hre_decoup USING GIST (geom);
        """ % (id_field, height_field, table_name_bati, table_name_maille)

        # Table intermédiaire de calculs d'indicateurs secondaires
        query += """
        DROP TABLE IF EXISTS hre_temp;
        CREATE TABLE hre_temp AS
            SELECT d.ID, st_area(d.geom) as surface, (st_area(d.geom) * d.hauteur) as volume, d.geom as geom
            FROM hre_decoup as d;
        CREATE INDEX IF NOT EXISTS temp_geom_gist ON hre_temp USING GIST (geom);
        """

        # Table intermédiaire de calcul de mean_h seulement pour les mailles intersectant du bâti
        query += """
        DROP TABLE IF EXISTS hre_maille_bis;
        CREATE TABLE hre_maille_bis AS
            SELECT m.ID as ID, ((sum(t.volume) / count(t.geom)) / (sum(t.surface) / count(t.geom))) as mean_h, m.geom as geom
            FROM %s as m, hre_temp as t
            WHERE ST_Intersects(m.geom, t.geom)
                AND (ST_GeometryType(m.geom) = 'ST_Polygon' OR ST_GeometryType(m.geom) = 'ST_MultiPolygon')
                AND (ST_GeometryType(t.geom) = 'ST_Polygon' OR ST_GeometryType(t.geom) = 'ST_MultiPolygon')
            GROUP BY m.ID, m.geom;
        CREATE INDEX IF NOT EXISTS maille_bis_geom_gist ON hre_maille_bis USING GIST (geom);
        """ % (table_name_maille)

        # Table intermédiaire seulement pour les mailles n'intersectant pas de bâti (par défaut, mean_h = 0)
        query += """
        DROP TABLE IF EXISTS hre_maille_ter;
        CREATE TABLE hre_maille_ter AS
            SELECT DISTINCT ID as ID, geom as geom
            FROM %s
            WHERE ID NOT IN
                (SELECT DISTINCT ID
                FROM hre_maille_bis);
        ALTER TABLE hre_maille_ter ADD mean_h DOUBLE PRECISION;
        UPDATE hre_maille_ter SET mean_h = 0;
        CREATE INDEX IF NOT EXISTS maille_ter_geom_gist ON hre_maille_ter USING GIST (geom);
        """ % (table_name_maille)

        # Union des 2 tables précédentes pour récupérer l'ensemble des polygones maille de départ
        query += """
        DROP TABLE IF EXISTS hre_height;
        CREATE TABLE hre_height AS
            SELECT ID, mean_h, geom
            FROM hre_maille_bis
            UNION
            SELECT ID, mean_h, geom
            FROM hre_maille_ter;
        ALTER TABLE hre_height ALTER COLUMN ID TYPE INTEGER;
        """

        # Exécution de la requête SQL
        if debug >= 1:
            print(query)
        executeQuery(connection, query)
        closeConnection(connection)

        # Export en shape de la table contenant l'indicateur calculé
        exportVectorByOgr2ogr(database_postgis, grid_output, 'hre_height', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, format_type=format_vector)

        ##########################################
        ### Nettoyage des fichiers temporaires ###
        ##########################################

        if not save_results_intermediate:
            # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Terrain Roughness Class)
            pass

    else:
        print(bold + magenta + "Le calcul de Height of Roughness Elements a déjà eu lieu." + endC)

    print(bold + yellow + "Fin du calcul de l'indicateur Height of Roughness Elements." + endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Height of Roughness Elements : ")

    return
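
Note that the mean_h expression above, (sum(volume) / count) / (sum(surface) / count), algebraically reduces to sum(volume) / sum(surface): the indicator is the building-area-weighted mean height per grid cell. A small pure-Python check of that equivalence on made-up (surface, height) pairs:

# Check that (sum(vol)/n) / (sum(surf)/n) == sum(vol)/sum(surf),
# i.e. mean_h is the area-weighted mean building height; data are made up.
buildings = [(120.0, 9.0), (80.0, 15.0), (40.0, 6.0)]  # (surface m2, hauteur m)

surfaces = [s for s, h in buildings]
volumes = [s * h for s, h in buildings]
n = len(buildings)

mean_h_sql_style = (sum(volumes) / n) / (sum(surfaces) / n)
mean_h_weighted = sum(volumes) / sum(surfaces)
assert abs(mean_h_sql_style - mean_h_weighted) < 1e-9
print(mean_h_weighted)  # 10.5 m
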
def polygonMerToTDC(input_im_ndvi_dico, output_dir, input_sea_points, fct_bin_mask_vect, simplif, input_cut_vector, buf_pos, buf_neg, no_data_value, path_time_log, epsg=2154, format_vector="ESRI Shapefile", extension_raster=".tif", extension_vector=".shp", save_results_intermediate=True, overwrite=True):

    # Mise à jour du Log
    starting_event = "PolygonMerToTDC() : Select PolygonMerToTDC starting : "
    timeLine(path_time_log,starting_event)

    # Affichage des paramètres
    if debug >= 3:
        print(bold + green + "Variables dans PolygonMerToTDC - Variables générales" + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "input_im_ndvi_dico : " + str(input_im_ndvi_dico) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "output_dir : " + str(output_dir) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "input_sea_points : " + str(input_sea_points) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "fct_bin_mask_vect : " + str(fct_bin_mask_vect) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "simplif : " + str(simplif) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "input_cut_vector : " + str(input_cut_vector) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "buf_pos : " + str(buf_pos) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "buf_neg : " + str(buf_neg) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "polygonMerToTDC() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constantes
    REP_TEMP_POLY_MER = "Temp_PolygonMerToTDC_"
    CODAGE_8B = "uint8"

    # Initialisation du TDC final (reste à None si aucune image n'est traitée)
    tdc_final = None

    # Création du répertoire de sortie s'il n'existe pas déjà
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Pour toutes les images à traiter
    for image_ndvi_entry in input_im_ndvi_dico.split():
        # L'image à traiter
        input_image = image_ndvi_entry.split(":")[0]
        image_name = os.path.splitext(os.path.basename(input_image))[0]

        # Création du répertoire temporaire de calcul
        repertory_temp = output_dir + os.sep + REP_TEMP_POLY_MER + image_name
        if not os.path.exists(repertory_temp):
            os.makedirs(repertory_temp)

        if not os.path.exists(input_image):
            print(cyan + "polygonMerToTDC() : " + bold + red + "L'image en entrée : " + input_image + " n'existe pas. Vérifiez le chemin !" + endC, file=sys.stderr)
            sys.exit(1)

        src_input_im = gdal.Open(input_image)
        if src_input_im is None:
            print(cyan + "polygonMerToTDC() : " + bold + red + "Impossible d'ouvrir l'image raster : " + input_image + endC, file=sys.stderr)
            sys.exit(1)
        try :
            srcband = src_input_im.GetRasterBand(2)
            if srcband is None:
                raise RuntimeError("bande 2 absente")
        except RuntimeError as err:
            print(cyan + "polygonMerToTDC() : " + bold + red + "Pas de bande 2 trouvée sur : " + input_image + endC, file=sys.stderr)
            e = "OS error: {0}".format(err)
            print(e, file=sys.stderr)
            sys.exit(1)

        # Traiter le cas où pas de NDVI derrière
        if ":" not in image_ndvi_entry:
            print(cyan + "polygonMerToTDC() : " + red + bold + "Aucun masque binaire vectorisé spécifié ! Nécessité d'au moins un par image." + endC, file=sys.stderr)
            sys.exit(1)

        # Parcours de toutes les images NDVI correspondant à chaque image
        for ndvi_mask_vect in image_ndvi_entry.split(":")[1].split(","):
            if debug >= 3:
                print(cyan + "polygonMerToTDC() : " + endC + "Traitement de : " + ndvi_mask_vect + endC)

            # Initialisation des noms des fichiers de sortie
            binary_mask_zeros = repertory_temp + os.sep + "b_mask_zeros_" + image_name + extension_raster
            binary_mask_zeros_vector = "b_mask_zeros_vect_" + image_name
            path_binary_mask_zeros_vector = repertory_temp + os.sep + binary_mask_zeros_vector + extension_vector
            true_values_buffneg = repertory_temp + os.sep + "true_values_buffneg_" + image_name + extension_vector
            decoup_buffneg = repertory_temp + os.sep + "decoupe_buffneg_" + os.path.splitext(os.path.basename(input_cut_vector))[0] + extension_vector

            ndvi_name = os.path.splitext(os.path.basename(ndvi_mask_vect))[0]
            if fct_bin_mask_vect :
                threshold = ndvi_name.split("_")[-2] + "_" + ndvi_name.split("_")[-1]
                poly_mer_shp = repertory_temp + os.sep + "poly_mer_" + image_name + "_" + str(threshold) + extension_vector
                poly_mer_shp_dilat = repertory_temp + os.sep + "poly_mer_dilat_" + image_name + "_" + str(threshold) + extension_vector
                poly_mer_shp_ferm = repertory_temp + os.sep + "poly_mer_ferm_" + image_name + "_" + str(threshold) + extension_vector
                polyline_mer = repertory_temp + os.sep + "polyline_mer_" + image_name + "_" + str(threshold) + extension_vector
                polyline_tdc = repertory_temp + os.sep + "tdc_" + image_name + "_" + str(threshold) + extension_vector
                polyline_tdc_simplif = repertory_temp + os.sep + "tdcs_" + image_name + "_" + str(threshold) + extension_vector
                polyline_tdc_simplif_decoup = output_dir + os.sep + "tdcsd_" + image_name + "_" + str(threshold) + extension_vector
            else :
                poly_mer_shp = repertory_temp + os.sep + "poly_mer_" + ndvi_name + extension_vector
                poly_mer_shp_dilat = repertory_temp + os.sep + "poly_mer_dilat_" + image_name + extension_vector
                poly_mer_shp_ferm = repertory_temp + os.sep + "poly_mer_ferm_" + image_name + extension_vector
                polyline_mer = repertory_temp + os.sep + "polyline_mer_" + ndvi_name + extension_vector
                polyline_tdc = repertory_temp + os.sep + "tdc_" + ndvi_name + extension_vector
                polyline_tdc_simplif = repertory_temp + os.sep + "tdcs_" + ndvi_name + extension_vector
                polyline_tdc_simplif_decoup = output_dir + os.sep + "tdcsd_" + ndvi_name + extension_vector

            # Création shp poly_mer_shp contenant uniquement les polygones mer
            if os.path.exists(poly_mer_shp):
                removeVectorFile(poly_mer_shp)

            # Sélection des polygones qui sont de la mer, dans la liste poly_mer
            withinPolygons(input_sea_points, ndvi_mask_vect, poly_mer_shp, overwrite, format_vector)

            # Fermeture (dilatation - érosion) sur les polygones mer obtenus pour supprimer les petits trous dans les polygones (bateaux, ...) et rassembler les polygones proches
            bufferVector(poly_mer_shp, poly_mer_shp_dilat, buf_pos, "", 1.0, 10, format_vector)
            bufferVector(poly_mer_shp_dilat, poly_mer_shp_ferm, buf_neg, "", 1.0, 10, format_vector)

            # Création masque binaire pour séparer les no data des vraies valeurs
            no_data_ima = getNodataValueImage(input_image)
            if no_data_ima is None:
                no_data_ima = no_data_value
            createBinaryMaskMultiBand(input_image, binary_mask_zeros, no_data_ima, CODAGE_8B)

            # Vectorisation du masque binaire true data/false data -> polygone avec uniquement les vraies valeurs
            if os.path.exists(path_binary_mask_zeros_vector):
                removeVectorFile(path_binary_mask_zeros_vector)
            polygonizeRaster(binary_mask_zeros, path_binary_mask_zeros_vector, binary_mask_zeros_vector)

            # Buffer négatif sur ce polygone
            bufferVector(path_binary_mask_zeros_vector, true_values_buffneg, -2, "", 1.0, 10, format_vector)

            # Transformation des polygones de la couche poly_mer_shp_ferm en polyligne
            convertePolygon2Polylines(poly_mer_shp_ferm, polyline_mer, overwrite, format_vector)

            # Découpe du TDC polyline_mer avec le polygone négatif
            cutVectorAll(true_values_buffneg, polyline_mer, polyline_tdc, overwrite, format_vector)

            # Simplification du TDC
            simplifyVector(polyline_tdc, polyline_tdc_simplif, simplif, format_vector)

            # Buffer négatif autour de input_cut_vector
            bufferVector(input_cut_vector, decoup_buffneg, -1, "", 1.0, 10, format_vector)

            # Découpe du TDC polyline_mer avec le buffer négatif du polygone mer
            cutVectorAll(decoup_buffneg, polyline_tdc_simplif, polyline_tdc_simplif_decoup, overwrite, format_vector)

            tdc_final = polyline_tdc_simplif_decoup

        # Suppression du repertoire temporaire
        if not save_results_intermediate and os.path.exists(repertory_temp):
            shutil.rmtree(repertory_temp)

    # Mise à jour du Log
    ending_event = "PolygonMerToTDC() : Select PolygonMerToTDC ending: "
    timeLine(path_time_log,ending_event)

    return tdc_final
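
input_im_ndvi_dico is a flat string of the form "image1:mask1,mask2 image2:mask3", consumed above with chained split() calls. A small sketch of the same parsing into an actual dictionary; the paths are made up.

# Parsing sketch for the "image:mask1,mask2 image2:mask3" string format
# consumed by polygonMerToTDC(); paths are illustrative only.
input_im_ndvi_dico = "/data/img1.tif:/data/m1_0_30.shp,/data/m1_0_35.shp /data/img2.tif:/data/m2_0_30.shp"

im_ndvi_dico = {}
for entry in input_im_ndvi_dico.split():
    image, masks = entry.split(":", 1)
    im_ndvi_dico[image] = masks.split(",")

print(im_ndvi_dico)
# {'/data/img1.tif': ['/data/m1_0_30.shp', '/data/m1_0_35.shp'],
#  '/data/img2.tif': ['/data/m2_0_30.shp']}
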
Exemplo n.º 20
def computeQualityIndiceRateQuantity(raster_input,
                                     vector_sample_input,
                                     repertory_output,
                                     base_name,
                                     geom,
                                     size_grid,
                                     pixel_size_x,
                                     pixel_size_y,
                                     field_value_verif,
                                     field_value_other,
                                     no_data_value,
                                     epsg,
                                     format_raster,
                                     format_vector,
                                     extension_raster,
                                     extension_vector,
                                     overwrite=True,
                                     save_results_intermediate=False):

    # Définition des constantes
    EXT_TXT = '.txt'
    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_BUILD = '_build'
    SUFFIX_OTHER = '_other'
    SUFFIX_LOCAL = '_local'
    SUFFIX_MATRIX = '_matrix'

    FIELD_NAME_CLASSIF = "classif"
    FIELD_TYPE = ogr.OFTInteger

    # Les variables locales
    vector_local_study = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_STUDY + extension_vector
    vector_local_cut_study = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_STUDY + extension_vector
    vector_local_cut_build = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_BUILD + extension_vector
    vector_local_cut_other = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_OTHER + extension_vector
    vector_local_cut = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + extension_vector
    raster_local_cut = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + extension_raster
    matrix_local_file = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_MATRIX + EXT_TXT

    class_ref_list = None
    class_pro_list = None
    rate_quantity_list = None
    matrix_origine = None
    kappa = 0.0
    overall_accuracy = 0.0

    # Nettoyage des fichiers de travail locaux
    if os.path.isfile(vector_local_study):
        removeVectorFile(vector_local_study)
    if os.path.isfile(vector_local_cut_study):
        removeVectorFile(vector_local_cut_study)
    if os.path.isfile(vector_local_cut):
        removeVectorFile(vector_local_cut)
    if os.path.isfile(vector_local_cut_build):
        removeVectorFile(vector_local_cut_build)
    if os.path.isfile(vector_local_cut_other):
        removeVectorFile(vector_local_cut_other)
    if os.path.isfile(raster_local_cut):
        removeFile(raster_local_cut)
    if os.path.isfile(matrix_local_file):
        removeFile(matrix_local_file)

    # Creation d'un shape file de travail local
    polygon_attr_geom_dico = {"1": [geom, {}]}
    createPolygonsFromGeometryList({}, polygon_attr_geom_dico,
                                   vector_local_study, epsg, format_vector)

    # Découpe sur zone local d'étude du fichier vecteur de référence
    cutVector(vector_local_study, vector_sample_input, vector_local_cut_build,
              format_vector)
    differenceVector(vector_local_cut_build, vector_local_study,
                     vector_local_cut_other, format_vector)

    addNewFieldVector(vector_local_cut_build, FIELD_NAME_CLASSIF, FIELD_TYPE,
                      field_value_verif, None, None, format_vector)
    addNewFieldVector(vector_local_cut_other, FIELD_NAME_CLASSIF, FIELD_TYPE,
                      field_value_other, None, None, format_vector)
    input_shape_list = [vector_local_cut_build, vector_local_cut_other]
    fusionVectors(input_shape_list, vector_local_cut)

    # Découpe sur zone local d'étude du fichier rasteur de classification
    if not cutImageByVector(vector_local_study, raster_input, raster_local_cut,
                            pixel_size_x, pixel_size_y, no_data_value, 0,
                            format_raster, format_vector):
        return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine

    # Calcul de la matrice de confusion
    computeConfusionMatrix(raster_local_cut, vector_local_cut, "",
                           FIELD_NAME_CLASSIF, matrix_local_file, overwrite)

    # lecture de la matrice de confusion
    matrix, class_ref_list, class_pro_list = readConfusionMatrix(
        matrix_local_file)
    matrix_origine = copy.deepcopy(matrix)

    if matrix == []:
        print(
            cyan + "computeQualityIndiceRateQuantity() : " + bold + yellow +
            "!!! Une erreur c'est produite au cours de la lecture de la matrice de confusion : "
            + matrix_local_file + ". Voir message d'erreur." + endC)
        matrix_origine = None
        return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine

    # Correction de la matrice de confusion
    # Dans le cas où le nombre de microclasses des échantillons de contrôle
    # et le nombre de microclasses de la classification sont différents
    class_missing_list = []
    if class_ref_list != class_pro_list:
        matrix, class_missing_list = correctMatrix(class_ref_list,
                                                   class_pro_list, matrix,
                                                   no_data_value)

    class_count = len(matrix[0]) - len(class_missing_list)

    # Calcul des indicateurs de qualité : rate_quantity_list
    precision_list, recall_list, fscore_list, performance_list, rate_false_positive_list, rate_false_negative_list, rate_quantity_list, class_list, overall_accuracy, overall_fscore, overall_performance, kappa = computeIndicators(
        class_count, matrix, class_ref_list, class_missing_list)

    # Chercher si une ligne no data existe si c'est le cas correction de la matrice
    if str(no_data_value) in class_pro_list:
        pos_col_nodata = class_pro_list.index(str(no_data_value))
        for line in matrix_origine:
            del line[pos_col_nodata]

        class_pro_list.remove(str(no_data_value))

    # Suppression des données temporaires locales
    if not save_results_intermediate:
        if os.path.isfile(vector_local_study):
            removeVectorFile(vector_local_study)

        if os.path.isfile(vector_local_cut_study):
            removeVectorFile(vector_local_cut_study)

        if os.path.isfile(vector_local_cut):
            removeVectorFile(vector_local_cut)

        if os.path.isfile(vector_local_cut_build):
            removeVectorFile(vector_local_cut_build)

        if os.path.isfile(vector_local_cut_other):
            removeVectorFile(vector_local_cut_other)

        if os.path.isfile(raster_local_cut):
            removeFile(raster_local_cut)

        if os.path.isfile(matrix_local_file):
            removeFile(matrix_local_file)

    return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine
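
computeIndicators() itself is not shown in this excerpt; as a reference for what kappa and overall_accuracy mean, here is the standard computation from a confusion matrix (rows = reference, columns = classified) on made-up counts. This illustrates the usual formulas only, not the internals of computeIndicators().

# Standard overall accuracy and Cohen's kappa from a 2x2 confusion matrix;
# counts are made up for illustration.
matrix = [[50, 10],   # ref "build": 50 bien classés, 10 confondus
          [5, 35]]    # ref "other": 5 confondus, 35 bien classés

total = sum(sum(row) for row in matrix)
observed = sum(matrix[i][i] for i in range(len(matrix))) / total
expected = sum(sum(matrix[i]) * sum(row[i] for row in matrix)
               for i in range(len(matrix))) / total**2
kappa = (observed - expected) / (1 - expected)

print(observed)  # overall accuracy = 0.85
print(kappa)     # ~0.69
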
def computeMajorityClass(input_grid, temp_directory, nodata_field, built_field,
                         mineral_field, baresoil_field, water_field,
                         vegetation_field, high_vegetation_field,
                         low_vegetation_field, maj_ocs_field, veg_mean_field,
                         class_label_dico_out, format_vector, extension_vector,
                         overwrite):

    SUFFIX_CLASS = '_class'
    FIELD_TYPE = ogr.OFTInteger
    FIELD_NAME_MAJORITY = 'majority'

    temp_class_list = []

    base_name = os.path.splitext(os.path.basename(input_grid))[0]
    temp_grid = temp_directory + os.sep + base_name + SUFFIX_CLASS + extension_vector
    temp_class0 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "0" + extension_vector
    temp_class1 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "1" + extension_vector
    temp_class2 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "2" + extension_vector
    temp_class3 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "3" + extension_vector
    temp_class4 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "4" + extension_vector

    ### Récupération de la classe majoritaire

    if debug >= 3:
        print(cyan + "computeMajorityClass() : " + endC + bold +
              "Récupération de la classe majoritaire." + endC + '\n')

    addNewFieldVector(input_grid,
                      maj_ocs_field,
                      FIELD_TYPE,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)
    attr_names_list = getAttributeNameList(input_grid,
                                           format_vector=format_vector)
    attr_names_list_str = "'"
    for attr_name in attr_names_list:
        attr_names_list_str += attr_name + ', '
    attr_names_list_str = attr_names_list_str[:-2] + "'"

    expression = "%s = '%s' OR %s = '%s' OR %s = '%s' OR %s = '%s'" % (
        FIELD_NAME_MAJORITY, nodata_field, FIELD_NAME_MAJORITY, built_field,
        FIELD_NAME_MAJORITY, mineral_field, FIELD_NAME_MAJORITY, water_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class0,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class0,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_OTHERS_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class0)

    expression = "%s = '%s'" % (FIELD_NAME_MAJORITY, baresoil_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class1,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class1,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_BARESOIL_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class1)

    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s < 1)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class2,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class2,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_LOW_VEG_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class2)

    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 1 AND %s < 5)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field, veg_mean_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class3,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class3,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_MED_VEG_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class3)

    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 5)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class4,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class4,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_HIGH_VEG_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class4)

    fusionVectors(temp_class_list, temp_grid, format_vector=format_vector)
    removeVectorFile(input_grid, format_vector=format_vector)
    copyVectorFile(temp_grid, input_grid, format_vector=format_vector)

    return 0
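
The five filters above encode one decision rule: nodata/built/mineral/water majorities map to an "others" class, bare soil keeps its own class, and vegetation majorities split on mean vegetation height (< 1 m, 1 to 5 m, >= 5 m). The same rule restated as a pure function; the default field names and label values below are hypothetical stand-ins.

# Pure-function restatement of the majority-class rule applied above;
# field names and the label dictionary are illustrative assumptions.
def majority_class(majority, veg_mean, class_label_dico_out,
                   nodata_field="nodata", built_field="built",
                   mineral_field="mineral", baresoil_field="baresoil",
                   water_field="water",
                   vegetation_fields=("vegetation", "high_veg", "low_veg")):
    if majority in (nodata_field, built_field, mineral_field, water_field):
        return class_label_dico_out["MAJ_OTHERS_CLASS"]
    if majority == baresoil_field:
        return class_label_dico_out["MAJ_BARESOIL_CLASS"]
    if majority in vegetation_fields:
        if veg_mean < 1:
            return class_label_dico_out["MAJ_LOW_VEG_CLASS"]
        if veg_mean < 5:
            return class_label_dico_out["MAJ_MED_VEG_CLASS"]
        return class_label_dico_out["MAJ_HIGH_VEG_CLASS"]
    return None

labels = {"MAJ_OTHERS_CLASS": 0, "MAJ_BARESOIL_CLASS": 1,
          "MAJ_LOW_VEG_CLASS": 2, "MAJ_MED_VEG_CLASS": 3,
          "MAJ_HIGH_VEG_CLASS": 4}
print(majority_class("vegetation", 3.2, labels))  # 3
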
Exemplo n.º 22
def comparareClassificationToReferenceGrid(image_input,
                                           vector_cut_input,
                                           vector_sample_input,
                                           vector_grid_input,
                                           vector_grid_output,
                                           size_grid,
                                           field_value_verif,
                                           no_data_value,
                                           path_time_log,
                                           epsg=2154,
                                           format_raster='GTiff',
                                           format_vector="ESRI Shapefile",
                                           extension_raster=".tif",
                                           extension_vector=".shp",
                                           save_results_intermediate=False,
                                           overwrite=True):

    # Mise à jour du Log
    starting_event = "comparareClassificationToReferenceGrid() : starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green +
          "## START : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    if debug >= 2:
        print(
            bold + green +
            "comparareClassificationToReferenceGrid() : Variables dans la fonction"
            + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "image_input : " + str(image_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_grid_input : " + str(vector_grid_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_grid_output : " + str(vector_grid_output) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "size_grid : " + str(size_grid) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "field_value_verif : " + str(field_value_verif))
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "no_data_value : " + str(no_data_value))
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "epsg  : " + str(epsg) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "overwrite : " + str(overwrite) + endC)

    # ETAPE 0 : PREPARATION DES FICHIERS INTERMEDIAIRES

    CODAGE = "uint16"
    SUFFIX_STUDY = '_study'
    SUFFIX_TEMP = '_temp'
    SUFFIX_FUSION = '_other_fusion'

    NONE_VALUE_QUANTITY = -1.0
    FIELD_VALUE_OTHER = 65535

    FIELD_NAME_ID = "id"
    FIELD_NAME_RATE_BUILD = "rate_build"
    FIELD_NAME_RATE_OTHER = "rate_other"
    FIELD_NAME_SREF_BUILD = "sref_build"
    FIELD_NAME_SCLA_BUILD = "scla_build"
    FIELD_NAME_SREF_OTHER = "sref_other"
    FIELD_NAME_SCLA_OTHER = "scla_other"
    FIELD_NAME_KAPPA = "kappa"
    FIELD_NAME_ACCURACY = "accuracy"

    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

    repertory_output = os.path.dirname(vector_grid_output)
    base_name = os.path.splitext(os.path.basename(vector_grid_output))[0]

    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    vector_grid_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    image_raster_other_fusion = repertory_output + os.sep + base_name + SUFFIX_FUSION + extension_raster

    # ETAPE 0 : VERIFICATION

    # Vérification de la valeur de la nomenclature à vérifier
    if field_value_verif >= FIELD_VALUE_OTHER:
        print(
            cyan + "comparareClassificationToReferenceGrid() : " + bold + red +
            "Attention : la valeur de nomenclature à vérifier : " +
            str(field_value_verif) +
            " doit être inférieure à la valeur arbitraire de fusion des autres valeurs : "
            + str(FIELD_VALUE_OTHER) + endC,
            file=sys.stderr)
        sys.exit(1)  # exit with an error code

    # ETAPE 1 : DEFINIR UN SHAPE ZONE D'ETUDE

    if (vector_cut_input is not None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input
    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # ETAPE 2 : UNIFORMISATION DE LA ZONE OTHER

    # Réallocation des valeurs de classification pour les valeurs autres que le bâti
    reaff_value_list = identifyPixelValues(image_input)
    if field_value_verif in reaff_value_list:
        reaff_value_list.remove(field_value_verif)
    if no_data_value in reaff_value_list:
        reaff_value_list.remove(no_data_value)
    change_reaff_value_list = [FIELD_VALUE_OTHER] * len(reaff_value_list)
    reallocateClassRaster(image_input, image_raster_other_fusion,
                          reaff_value_list, change_reaff_value_list)

    # ETAPE 3 : CREATION DE LA GRILLE SUR LA ZONE D'ETUDE

    # Définir les attributs du fichier
    attribute_dico = {
        FIELD_NAME_ID: ogr.OFTInteger,
        FIELD_NAME_RATE_BUILD: ogr.OFTReal,
        FIELD_NAME_RATE_OTHER: ogr.OFTReal,
        FIELD_NAME_SREF_BUILD: ogr.OFTReal,
        FIELD_NAME_SCLA_BUILD: ogr.OFTReal,
        FIELD_NAME_SREF_OTHER: ogr.OFTReal,
        FIELD_NAME_SCLA_OTHER: ogr.OFTReal,
        FIELD_NAME_KAPPA: ogr.OFTReal,
        FIELD_NAME_ACCURACY: ogr.OFTReal
    }
    nb_polygon = 0

    if (vector_grid_input is not None) and (vector_grid_input != "") and (
            os.path.isfile(vector_grid_input)):
        # Utilisation du fichier grille d'entrée

        # Recopie du fichier grille d'entrée vers le fichier grille de sortie
        copyVectorFile(vector_grid_input, vector_grid_output)

        # Ajout des champs au fichier grille de sortie
        for field_name in attribute_dico:
            addNewFieldVector(vector_grid_output, field_name,
                              attribute_dico[field_name], None, None, None,
                              format_vector)

        # Mettre le champs "id" identifiant du carré de l'élément de la grille
        nb_polygon = updateIndexVector(vector_grid_output, FIELD_NAME_ID,
                                       format_vector)

    else:
        # S'il n'existe pas de fichier grille, on en crée un avec la valeur de size_grid

        # Créer le fichier grille
        nb_polygon = createGridVector(vector_study, vector_grid_temp,
                                      size_grid, size_grid, attribute_dico,
                                      overwrite, epsg, format_vector)

        # Découper la grille avec le shape zone d'étude
        cutVectorAll(vector_study, vector_grid_temp, vector_grid_output,
                     format_vector)

    # ETAPE 4 : CALCUL DE L'INDICATEUR DE QUALITE POUR CHAQUE CASE DE LA GRILLE

    if debug >= 2:
        print(bold + "nb_polygon = " + endC + str(nb_polygon) + "\n")

    # Pour chaque polygone existant
    sum_rate_quantity_build = 0
    nb_rate_sum = 0
    size_area_pixel = abs(pixel_size_x * pixel_size_y)

    for id_polygon in range(nb_polygon):
        geom_list = getGeomPolygons(vector_grid_output, FIELD_NAME_ID,
                                    id_polygon, format_vector)
        if geom_list is not None and geom_list != []:

            if debug >= 1:
                print(cyan + "comparareClassificationToReferenceGrid() : " +
                      bold + green +
                      "Calcul de la matrice pour le polygon n°: " +
                      str(id_polygon) + endC)

            geom = geom_list[0]
            class_ref_list, class_pro_list, rate_quantity_list, kappa, accuracy, matrix = computeQualityIndiceRateQuantity(
                image_raster_other_fusion, vector_sample_input,
                repertory_output, base_name + str(id_polygon), geom, size_grid,
                pixel_size_x, pixel_size_y, field_value_verif,
                FIELD_VALUE_OTHER, no_data_value, epsg, format_raster,
                format_vector, extension_raster, extension_vector, overwrite,
                save_results_intermediate)

            # Si les calculs des indicateurs de qualité sont ok
            if debug >= 2:
                print(matrix)
            if matrix is not None and matrix != [] and matrix[0] != []:

                # Récupérer la quantité de bâti et calcul de la surface de référence et de la surface de classification (carreau entier ou pas !)
                if len(class_ref_list) == 2 and len(class_pro_list) == 2:
                    # Cas où l'on a des pixels de build et other (en ref et en prod)
                    rate_quantity_build = rate_quantity_list[0]
                    rate_quantity_other = rate_quantity_list[1]
                    size_area_ref_build = (matrix[0][0] + matrix[0][1]) * size_area_pixel
                    size_area_classif_build = (matrix[0][0] + matrix[1][0]) * size_area_pixel
                    size_area_ref_other = (matrix[1][0] + matrix[1][1]) * size_area_pixel
                    size_area_classif_other = (matrix[0][1] + matrix[1][1]) * size_area_pixel
                    sum_rate_quantity_build += rate_quantity_build
                    nb_rate_sum += 1

                else:  # Cas où l'on a uniquement des pixels de build OU uniquement des pixels de other

                    if class_ref_list[0] == field_value_verif:
                        # Cas où l'on a uniquement des pixels références build
                        rate_quantity_build = rate_quantity_list[0]
                        rate_quantity_other = NONE_VALUE_QUANTITY
                        size_area_ref_other = 0

                        if len(class_pro_list) == 2:
                            # Cas où l'on a des pixels de prod build et other
                            size_area_ref_build = (matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][0] * size_area_pixel
                            size_area_classif_other = matrix[0][1] * size_area_pixel

                        else:
                            size_area_ref_build = matrix[0][0] * size_area_pixel
                            if class_pro_list[0] == field_value_verif:
                                # Cas où l'on a uniquement des pixels prod build
                                size_area_classif_build = matrix[0][0] * size_area_pixel
                                size_area_classif_other = 0

                            else:  # Cas où l'on a uniquement des pixels prod other
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][0] * size_area_pixel

                    else:  # Cas où l'on a uniquement des pixels références other
                        rate_quantity_build = NONE_VALUE_QUANTITY
                        rate_quantity_other = rate_quantity_list[0]
                        size_area_ref_build = 0

                        if len(class_pro_list) == 2:
                            # Cas où l'on a des pixels de prod build et other
                            size_area_ref_other = (matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][0] * size_area_pixel
                            size_area_classif_other = matrix[0][1] * size_area_pixel

                        else:
                            size_area_ref_other = matrix[0][0] * size_area_pixel
                            if class_pro_list[0] == field_value_verif:
                                # Cas où l'on a uniquement des pixels prod build
                                size_area_classif_build = matrix[0][0] * size_area_pixel
                                size_area_classif_other = 0

                            else:  # Cas où l'on a uniquement des pixels prod other
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][0] * size_area_pixel

                # Mettre à jour ses éléments du carré de la grille
                setAttributeValues(
                    vector_grid_output, FIELD_NAME_ID, id_polygon, {
                        FIELD_NAME_RATE_BUILD: rate_quantity_build,
                        FIELD_NAME_RATE_OTHER: rate_quantity_other,
                        FIELD_NAME_SREF_BUILD: size_area_ref_build,
                        FIELD_NAME_SCLA_BUILD: size_area_classif_build,
                        FIELD_NAME_SREF_OTHER: size_area_ref_other,
                        FIELD_NAME_SCLA_OTHER: size_area_classif_other,
                        FIELD_NAME_KAPPA: kappa,
                        FIELD_NAME_ACCURACY: accuracy
                    }, format_vector)

    # Calcul de la moyenne
    if nb_rate_sum != 0:
        average_quantity_build = sum_rate_quantity_build / nb_rate_sum
    else:
        average_quantity_build = 0
    if debug >= 2:
        print(bold + "nb_polygon_used = " + endC + str(nb_rate_sum))
        print(bold + "average_quantity_build = " + endC +
              str(average_quantity_build) + "\n")

    # ETAPE 5 : SUPPRESSION DES FICHIERS INTERMEDIAIRES INUTILES

    # Suppression des données intermédiaires
    if not save_results_intermediate:

        if not cutting_action:
            if os.path.isfile(vector_study):
                removeVectorFile(vector_study)

        if os.path.isfile(image_raster_other_fusion):
            removeFile(image_raster_other_fusion)

        if os.path.isfile(vector_grid_temp):
            removeVectorFile(vector_grid_temp)

    print(endC)
    print(bold + green +
          "## END : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    # Mise à jour du Log
    ending_event = "comparareClassificationToReferenceGrid() :  ending : "
    timeLine(path_time_log, ending_event)

    return average_quantity_build
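
In the two-class case above, the reference and classified surfaces come straight from the confusion matrix times the pixel area: row sums give reference areas, column sums give classified areas. A worked example on made-up numbers:

# Worked example of the surface computations used above: 2x2 confusion
# matrix (row = reference, column = classified), pixel area in m².
matrix = [[400, 50],    # ref build
          [30, 520]]    # ref other
size_area_pixel = 0.25  # ex : pixels de 0,5 m x 0,5 m

size_area_ref_build = (matrix[0][0] + matrix[0][1]) * size_area_pixel       # 112.5 m²
size_area_classif_build = (matrix[0][0] + matrix[1][0]) * size_area_pixel   # 107.5 m²
size_area_ref_other = (matrix[1][0] + matrix[1][1]) * size_area_pixel       # 137.5 m²
size_area_classif_other = (matrix[0][1] + matrix[1][1]) * size_area_pixel   # 142.5 m²
print(size_area_ref_build, size_area_classif_build,
      size_area_ref_other, size_area_classif_other)
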
def binaryMaskVect(input_image,
                   output_dir,
                   threshold,
                   input_cut_vector,
                   attributes_list,
                   no_data_value,
                   epsg,
                   format_raster="GTiff",
                   format_vector="ESRI Shapefile",
                   extension_raster=".tif",
                   extension_vector=".shp",
                   save_results_intermediate=False,
                   overwrite=True):

    # Affichage des paramètres
    if debug >= 3:
        print(bold + green +
              "Variables dans le binaryMaskVect - Variables générales" + endC)
        print(cyan + "binaryMaskVect() : " + endC + "input_image : " +
              str(input_image) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "output_dir : " +
              str(output_dir) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "threshold : " +
              str(threshold) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "input_cut_vector : " +
              str(input_cut_vector) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "format_raster : " +
              str(format_raster) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "binaryMaskVect() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "binaryMaskVect() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    image_name = os.path.splitext(os.path.basename(input_image))[0]
    threshold_str = str(threshold).replace('.', '_')
    binary_mask = output_dir + os.sep + "bin_mask_" + image_name + "_" + threshold_str + extension_raster
    binary_mask_decoup = output_dir + os.sep + "bin_mask_decoup_" + image_name + "_" + threshold_str + extension_raster
    binary_mask_vector = output_dir + os.sep + "bin_mask_vect_" + image_name + "_" + threshold_str + extension_vector

    # Création du répertoire de sortie s'il n'existe pas déjà
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Suppression des fichiers temporaires pour les calculs
    if os.path.exists(binary_mask):
        removeFile(binary_mask)

    if os.path.exists(binary_mask_decoup):
        removeFile(binary_mask_decoup)

    if os.path.exists(binary_mask_vector):
        if overwrite:
            removeVectorFile(binary_mask_vector, format_vector)
        else:
            return binary_mask_vector

    # Création du masque binaire
    createBinaryMask(input_image, binary_mask, threshold, False)

    if input_cut_vector != "":
        # Découpe du raster
        cutImageByVector(input_cut_vector, binary_mask, binary_mask_decoup,
                         None, None, no_data_value, epsg, format_raster,
                         format_vector)
    else:
        binary_mask_decoup = binary_mask

    # Vectorisation du masque binaire découpé
    polygonizeRaster(binary_mask_decoup, binary_mask_vector, image_name, "id",
                     format_vector)

    # Add the fields to the created vector file
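    # Each item of attributes_list is assumed to expose name, ogrType, value
    # and width members (inferred from the accesses below)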
    for attribute in attributes_list:
        addNewFieldVector(binary_mask_vector, attribute.name,
                          attribute.ogrType, attribute.value, attribute.width,
                          None, format_vector)

    # Remove useless intermediate files and the temporary directory
    if not save_results_intermediate:
        removeFile(binary_mask)
        removeFile(binary_mask_decoup)

    return binary_mask_vector
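# Minimal usage sketch for binaryMaskVect (paths and threshold are
# hypothetical; an empty attributes_list adds no extra fields):
#   mask_vect = binaryMaskVect("ndvi.tif", "/tmp/masks", 0.3, "", [], 0, 2154)
#   -> writes /tmp/masks/bin_mask_vect_ndvi_0_3.shp and returns its path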
def prepareData(input_buffer_tdc,
                input_paysage,
                output_dir,
                input_repertories_list,
                id_paysage,
                id_name_sub_rep,
                epsg,
                optimization_zone,
                no_cover,
                zone_date,
                separ_name,
                pos_date,
                nb_char_date,
                separ_date,
                path_time_log,
                format_raster='GTiff',
                format_vector="ESRI Shapefile",
                extension_raster=".tif",
                extension_vector=".shp",
                save_results_intermediate=True,
                overwrite=True):
    # Update the log
    starting_event = "prepareData() : Select prepare data starting : "
    timeLine(path_time_log, starting_event)

    # Display the parameters
    if debug >= 3:
        print(bold + green +
              "Variables in prepareData - General variables" + endC)
        print(cyan + "PrepareData() : " + endC + "input_buffer_tdc : " +
              str(input_buffer_tdc) + endC)
        print(cyan + "PrepareData() : " + endC + "input_paysage : " +
              str(input_paysage) + endC)
        print(cyan + "PrepareData() : " + endC + "output_dir : " +
              str(output_dir) + endC)
        print(cyan + "PrepareData() : " + endC + "input_repertories_list : " +
              str(input_repertories_list) + endC)
        print(cyan + "PrepareData() : " + endC + "id_paysage : " +
              str(id_paysage) + endC)
        print(cyan + "PrepareData() : " + endC + "id_name_sub_rep : " +
              str(id_name_sub_rep) + endC)
        print(cyan + "PrepareData() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "PrepareData() : " + endC + "optimization_zone : " +
              str(optimization_zone) + endC)
        print(cyan + "PrepareData() : " + endC + "no_cover : " +
              str(no_cover) + endC)
        print(cyan + "PrepareData() : " + endC + "zone_date : " +
              str(zone_date) + endC)
        print(cyan + "PrepareData() : " + endC + "separ_name : " +
              str(separ_name) + endC)
        print(cyan + "PrepareData() : " + endC + "pos_date : " +
              str(pos_date) + endC)
        print(cyan + "PrepareData() : " + endC + "nb_char_date : " +
              str(nb_char_date) + endC)
        print(cyan + "PrepareData() : " + endC + "separ_date : " +
              str(separ_date) + endC)
        print(cyan + "PrepareData() : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "PrepareData() : " + endC + "format_raster : " +
              str(format_raster) + endC)
        print(cyan + "PrepareData() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "PrepareData() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "PrepareData() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "PrepareData() : " + endC + "save_results_inter : " +
              str(save_results_intermediate) + endC)
        print(cyan + "PrepareData() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    REPERTORY_PAYSAGES = "Paysages"
    REPERTORY_IMAGES = "Images"
    ID_P = "id_p"

    SUFFIX_OPTI = "_opti"
    SUFFIX_ASS = "_ass"
    SUFFIX_CUT = "_cut"
    SUFFIX_ERROR = "_error"
    SUFFIX_MERGE = "_merge"
    SUFFIX_CLEAN = "_clean"
    SUFFIX_STACK = "_stack"

    output_dir_paysages = output_dir + os.sep + REPERTORY_PAYSAGES
    output_dir_images = output_dir + os.sep + REPERTORY_IMAGES

    # Create the output directory if it does not already exist
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Create the output directory for the landscapes if it does not exist
    if not os.path.exists(output_dir_paysages):
        os.makedirs(output_dir_paysages)

    # Create the output directory for the images if it does not exist
    if not os.path.exists(output_dir_images):
        os.makedirs(output_dir_images)

    # Retrieve the EPSG code from the footprint file
    if epsg == 0:
        epsg = getProjection(input_paysage, format_vector)

    # Create the optimal landscape
    optimPaysage(input_buffer_tdc, input_paysage, optimization_zone,
                 SUFFIX_OPTI, output_dir_paysages, id_paysage, format_vector)

    # Create one shapefile per polygon
    paysage_opti = output_dir_paysages + os.sep + os.path.splitext(
        os.path.basename(input_paysage))[0] + SUFFIX_OPTI + os.path.splitext(
            input_paysage)[1]
    if id_paysage != "":
        paysages_list = splitVector(paysage_opti, str(output_dir_paysages),
                                    str(id_paysage), epsg, format_vector,
                                    extension_vector)
    else:
        paysages_list = splitVector(paysage_opti, str(output_dir_paysages),
                                    ID_P, epsg, format_vector,
                                    extension_vector)

    if debug >= 3:
        print(cyan + "PrepareData() : " + endC +
              "Liste des fichiers en entrée de imagesAssembly() : " +
              str(paysages_list))

    # Create the images output directory if it does not already exist (already created above)
    if not os.path.exists(output_dir_images):
        os.makedirs(output_dir_images)

    # Assemble the images within the optimized landscapes
    # If no overlap between the images is requested
    if no_cover:
        sub_repertory_images_paysages_list = []
        repertory_images_sources_list = []
        repertory_input_image_dir = input_repertories_list[
            0]  # WARNING !!!! GFT: taking only the first image directory looks like a questionable approach????

        # Look for the column containing the image name
        for shape in paysages_list:
            attribute_name_dico = {}
            attribute_name_dico[id_name_sub_rep] = ogr.OFTString
            res_values_dico = getAttributeValues(shape, None, None,
                                                 attribute_name_dico,
                                                 format_vector)
            sub_repertory_images_paysage = res_values_dico[id_name_sub_rep][0]
            sub_repertory_images_paysages_list.append(
                sub_repertory_images_paysage)

        # If the column exists
        if len(sub_repertory_images_paysages_list) != 0:
            for sub_repertory_images_paysage in sub_repertory_images_paysages_list:
                repertory_images_sources_list.append(
                    repertory_input_image_dir + os.sep +
                    sub_repertory_images_paysage)

        # Otherwise, ask the user to enter the image names
        else:
            print("List of optimized landscapes : " + str(paysages_list))
            repertory_images_sources_list = input(
                "Enter the list of images associated with each landscape, in order, as a list [..., ..., ...] \n"
            )
            while len(repertory_images_sources_list) != len(paysages_list):
                print(
                    "Length of the image list differs from that of the landscapes \n"
                )
                repertory_images_sources_list = input(
                    "Enter the list of images associated with each landscape, in order \n"
                )
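            # Note: under Python 3, input() returns a raw string, so the len()
            # comparison above counts characters rather than list items; the
            # reply would need to be parsed (e.g. with ast.literal_eval) to
            # obtain an actual list. The code appears to assume Python 2
            # input() semantics, which evaluated the typed expression.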
        if debug >= 2:
            print("repertory_images_sources_list " +
                  str(repertory_images_sources_list))

        # Run ImagesAssembly on the paired elements of the two lists
        for i in range(len(paysages_list)):
            image_output = output_dir_images + os.sep + os.path.splitext(
                os.path.basename(
                    paysages_list[i]))[0] + SUFFIX_ASS + extension_raster
            try:
                selectAssembyImagesByHold(
                    paysages_list[i], [repertory_images_sources_list[i]],
                    image_output, False, True, epsg, False, False, False,
                    False, 0, 0, 0, 0, separ_name, pos_date, nb_char_date,
                    separ_date, path_time_log, SUFFIX_ERROR, SUFFIX_MERGE,
                    SUFFIX_CLEAN, SUFFIX_STACK, format_raster, format_vector,
                    extension_raster, extension_vector,
                    save_results_intermediate, overwrite)
            except Exception:
                # Failures on individual landscapes are deliberately swallowed
                # so the remaining landscapes can still be processed
                pass

    else:
        for shape in paysages_list:
            image_output = output_dir_images + os.sep + os.path.splitext(
                os.path.basename(shape))[0] + SUFFIX_ASS + extension_raster

            if optimization_zone:
                shape_cut = os.path.splitext(
                    shape)[0] + SUFFIX_CUT + os.path.splitext(shape)[1]
                cutVector(input_buffer_tdc, shape, shape_cut, overwrite,
                          format_vector)
            else:
                shape_cut = shape

            selectAssembyImagesByHold(shape_cut, input_repertories_list,
                                      image_output, False, zone_date, epsg,
                                      False, False, False, False, 0, 0, 0, 0,
                                      separ_name, pos_date, nb_char_date,
                                      separ_date, path_time_log, SUFFIX_ERROR,
                                      SUFFIX_MERGE, SUFFIX_CLEAN, SUFFIX_STACK,
                                      format_raster, format_vector,
                                      save_results_intermediate, overwrite)
            if optimization_zone and os.path.exists(shape_cut):
                removeVectorFile(shape_cut, format_vector)

    # Update the log
    ending_event = "prepareData() : Select prepare data ending : "
    timeLine(path_time_log, ending_event)

    return
def estimateQualityMns(image_input,
                       vector_cut_input,
                       vector_sample_input_list,
                       vector_sample_points_input,
                       raster_input_dico,
                       vector_output,
                       no_data_value,
                       path_time_log,
                       format_raster='GTiff',
                       epsg=2154,
                       format_vector='ESRI Shapefile',
                       extension_raster=".tif",
                       extension_vector=".shp",
                       save_results_intermediate=False,
                       overwrite=True):

    # Update the log
    starting_event = "estimateQualityMns() : Masks creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : CREATE HEIGHT POINTS FILE FROM MNS" +
          endC)
    print(endC)

    if debug >= 2:
        print(bold + green +
              "estimateQualityMns() : Variables in the function" + endC)
        print(cyan + "estimateQualityMns() : " + endC + "image_input : " +
              str(image_input) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "vector_cut_input : " +
              str(vector_cut_input) + endC)
        print(cyan + "estimateQualityMns() : " + endC +
              "vector_sample_input_list : " + str(vector_sample_input_list) +
              endC)
        print(cyan + "estimateQualityMns() : " + endC +
              "vector_sample_points_input : " +
              str(vector_sample_points_input) + endC)
        print(cyan + "estimateQualityMns() : " + endC +
              "raster_input_dico : " + str(raster_input_dico) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "vector_output : " +
              str(vector_output) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "no_data_value : " +
              str(no_data_value))
        print(cyan + "estimateQualityMns() : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "epsg  : " +
              str(epsg) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "format_raster : " +
              str(format_raster) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "estimateQualityMns() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "estimateQualityMns() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Definition of the constants
    EXT_DBF = '.dbf'
    EXT_CSV = '.csv'

    CODAGE = "uint16"

    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_TEMP = '_temp'
    SUFFIX_CLEAN = '_clean'
    SUFFIX_SAMPLE = '_sample'

    ATTRIBUTE_ID = "ID"
    ATTRIBUTE_Z_INI = "Z_INI"
    ATTRIBUTE_Z_FIN = "Z_FIN"
    ATTRIBUTE_PREC_ALTI = "PREC_ALTI"
    ATTRIBUTE_Z_REF = "Z_Ref"
    ATTRIBUTE_Z_MNS = "Z_Mns"
    ATTRIBUTE_Z_DELTA = "Z_Delta"

    ERODE_EDGE_POINTS = -1.0

    ERROR_VALUE = -99.0
    ERROR_MIN_VALUE = -9999
    ERROR_MAX_VALUE = 9999
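
    # Expected structure of raster_input_dico, inferred from the accesses
    # below (path and values here are purely illustrative):
    #   {"slope.tif": [["Slope"], [0.0, 30.0]]}
    # where [0][0] is the attribute column to fill and the optional
    # [1][0]/[1][1] pair gives min/max thresholds used to filter the points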

    # STEP 0 : PREPARATION OF THE INTERMEDIATE FILES

    # If the output file already exists, it is overwritten
    check = os.path.isfile(vector_output)
    if check and not overwrite:  # If an output file with the same name already exists and overwrite is set to False, then END
        print(cyan + "estimateQualityMns() : " + bold + yellow +
              "Output file %s already exists : not updated" %
              (vector_output) + endC)
        return

    if os.path.isfile(os.path.splitext(vector_output)[0] + EXT_CSV):
        removeFile(os.path.splitext(vector_output)[0] + EXT_CSV)

    repertory_output = os.path.dirname(vector_output)
    base_name = os.path.splitext(os.path.basename(vector_output))[0]

    vector_output_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    raster_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_raster
    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    vector_study_clean = repertory_output + os.sep + base_name + SUFFIX_STUDY + SUFFIX_CLEAN + extension_vector
    image_cut = repertory_output + os.sep + base_name + SUFFIX_CUT + extension_raster
    vector_sample_temp = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + extension_vector
    vector_sample_temp_clean = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + SUFFIX_CLEAN + extension_vector

    # Prepare clipped versions of the external raster data
    raster_cut_dico = {}
    for raster_input in raster_input_dico:
        base_name_raster = os.path.splitext(os.path.basename(raster_input))[0]
        raster_cut = repertory_output + os.sep + base_name_raster + SUFFIX_CUT + extension_raster
        raster_cut_dico[raster_input] = raster_cut
        if os.path.exists(raster_cut):
            removeFile(raster_cut)

    # STEP 1 : DEFINE A STUDY-AREA SHAPEFILE

    if (vector_cut_input is not None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input

    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # STEP 2 : CLIP THE RASTER WITH THE STUDY VECTOR AND RESAMPLE IF NEEDED

    if cutting_action:
        # Get the x and y pixel sizes of the reference MNS file
        pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

        # If the output file already exists, remove it
        if os.path.exists(image_cut):
            removeFile(image_cut)

        # Clipping command
        if not cutImageByVector(vector_study, image_input, image_cut,
                                pixel_size_x, pixel_size_y, no_data_value, 0,
                                format_raster, format_vector):
            print(
                cyan + "estimateQualityMns() : " + bold + red +
                "An error occurred while clipping the image : "
                + image_input + endC,
                file=sys.stderr)
            # A bare "raise" would fail here (no active exception), so raise
            # an explicit error instead
            raise NameError(
                cyan + "estimateQualityMns() : " + bold + red +
                "Clipping of image " + image_input + " failed" + endC)

        if debug >= 2:
            print(cyan + "estimateQualityMns() : " + bold + green +
                  "CLIPPING RASTER %s WITH VECTOR %s" %
                  (image_input, vector_study) + endC)
    else:
        image_cut = image_input

    # Geometry and footprint of the (possibly clipped) reference MNS image
    # Computed before the loop below so that pixel_size_x / pixel_size_y are
    # defined even when no cutting_action was performed
    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_cut)
    cols, rows, bands = getGeometryImage(image_cut)
    xmin, xmax, ymin, ymax = getEmpriseImage(image_cut)

    # Clip every raster of the raster list
    for raster_input in raster_input_dico:
        raster_cut = raster_cut_dico[raster_input]
        if not cutImageByVector(vector_study, raster_input, raster_cut,
                                pixel_size_x, pixel_size_y, no_data_value, 0,
                                format_raster, format_vector):
            raise NameError(
                cyan + "estimateQualityMns() : " + bold + red +
                "An error occurred while clipping the raster : "
                + raster_input + endC)

    if debug >= 3:
        print("Geometrie Image : ")
        print("  cols = " + str(cols))
        print("  rows = " + str(rows))
        print("  xmin = " + str(xmin))
        print("  xmax = " + str(xmax))
        print("  ymin = " + str(ymin))
        print("  ymax = " + str(ymax))
        print("  pixel_size_x = " + str(pixel_size_x))
        print("  pixel_size_y = " + str(pixel_size_y))
        print("\n")

    # Dictionary of point coordinates in the map coordinate system
    points_random_value_dico = {}
    # List of point coordinates in raw image-matrix format
    points_coordonnees_image_list = []

    # Depending on whether the sample-points file is used, or the points are rebuilt from the vertices of the line vectors
    if (vector_sample_points_input is None) or (vector_sample_points_input
                                                == ""):

        # STEP 3 : CLIP THE INPUT REFERENCE VECTORS WITH THE STUDY VECTOR, MERGE THEM, AND
        #          READ A LINE VECTOR, SAVING THE COORDINATES AND HEIGHT OF THE END POINTS

        # Clip the reference DB vectors with the study-area vector
        vector_sample_input_cut_list = []
        for vector_sample in vector_sample_input_list:
            vector_name = os.path.splitext(os.path.basename(vector_sample))[0]
            vector_sample_cut = repertory_output + os.sep + vector_name + SUFFIX_CUT + extension_vector
            vector_sample_input_cut_list.append(vector_sample_cut)
        cutoutVectors(vector_study, vector_sample_input_list,
                      vector_sample_input_cut_list, format_vector)

        # Merge the clipped reference DB vectors
        fusionVectors(vector_sample_input_cut_list, vector_sample_temp,
                      format_vector)

        # Prepare the columns
        names_column_start_point_list = [
            ATTRIBUTE_ID, ATTRIBUTE_Z_INI, ATTRIBUTE_PREC_ALTI
        ]
        names_column_end_point_list = [
            ATTRIBUTE_ID, ATTRIBUTE_Z_FIN, ATTRIBUTE_PREC_ALTI
        ]
        fields_list = [
            ATTRIBUTE_ID, ATTRIBUTE_PREC_ALTI, ATTRIBUTE_Z_INI, ATTRIBUTE_Z_FIN
        ]

        multigeometries2geometries(vector_sample_temp,
                                   vector_sample_temp_clean, fields_list,
                                   "MULTILINESTRING", format_vector)
        points_coordinates_dico = readVectorFileLinesExtractTeminalsPoints(
            vector_sample_temp_clean, names_column_start_point_list,
            names_column_end_point_list, format_vector)

    else:
        # STEP 3_BIS : CLIP THE POINT-SAMPLE VECTORS WITH THE FOOTPRINT VECTOR AND
        #              READ THE SAMPLE COORDINATES DIRECTLY FROM THE POINT VECTOR FILE

        # Clip the sample points with the study vector and read their coordinates
        cutVectorAll(vector_study, vector_sample_points_input,
                     vector_sample_temp, format_vector)
        points_coordinates_dico = readVectorFilePoints(vector_sample_temp,
                                                       format_vector)

    # STEP 4 : PREPARE THE POINT VECTOR

    for index_key in points_coordinates_dico:
        # Get the coordinate values
        coord_info_list = points_coordinates_dico[index_key]
        coor_x = coord_info_list[0]
        coor_y = coord_info_list[1]
        attribut_dico = coord_info_list[2]

        # Point coordinates in image-matrix format
        pos_x = int(round((coor_x - xmin) / abs(pixel_size_x)) - 1)
        pos_y = int(round((ymax - coor_y) / abs(pixel_size_y)) - 1)
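        # Worked example (illustrative numbers, not from the source): with
        # xmin = 100000 and a 0.5 m pixel, a point at x = 100010.3 maps to
        # column int(round(10.3 / 0.5) - 1) = 20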

        if pos_x < 0:
            pos_x = 0
        if pos_x >= cols:
            pos_x = cols - 1
        if pos_y < 0:
            pos_y = 0
        if pos_y >= rows:
            pos_y = rows - 1

        coordonnees_list = [pos_x, pos_y]
        points_coordonnees_image_list.append(coordonnees_list)

        value_ref = 0.0
        if ATTRIBUTE_Z_INI in attribut_dico:
            value_ref = float(attribut_dico[ATTRIBUTE_Z_INI])
        if ATTRIBUTE_Z_FIN in attribut_dico:
            value_ref = float(attribut_dico[ATTRIBUTE_Z_FIN])

        precision_alti = 0.0
        if ATTRIBUTE_PREC_ALTI in attribut_dico:
            precision_alti = float(attribut_dico[ATTRIBUTE_PREC_ALTI])

        point_attr_dico = {
            ATTRIBUTE_ID: index_key,
            ATTRIBUTE_Z_REF: value_ref,
            ATTRIBUTE_PREC_ALTI: precision_alti,
            ATTRIBUTE_Z_MNS: 0.0,
            ATTRIBUTE_Z_DELTA: 0.0
        }

        for raster_input in raster_input_dico:
            field_name = raster_input_dico[raster_input][0][0]
            point_attr_dico[field_name] = 0.0

        points_random_value_dico[index_key] = [[coor_x, coor_y],
                                               point_attr_dico]

    # STEP 5 : READ THE HEIGHT DATA FROM THE MNS AND THE OTHER RASTERS

    # Read the values from the raster file
    values_height_list = getPixelsValueListImage(
        image_cut, points_coordonnees_image_list)
    values_others_dico = {}
    for raster_input in raster_input_dico:
        raster_cut = raster_cut_dico[raster_input]
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_image_list)
        values_others_dico[raster_input] = values_list

    for i in range(len(points_random_value_dico)):
        value_mns = values_height_list[i]
        value_ref = points_random_value_dico[i][1][ATTRIBUTE_Z_REF]

        points_random_value_dico[i][1][ATTRIBUTE_Z_MNS] = float(value_mns)
        precision_alti = points_random_value_dico[i][1][ATTRIBUTE_PREC_ALTI]
        points_random_value_dico[i][1][ATTRIBUTE_PREC_ALTI] = float(
            precision_alti)
        value_diff = value_ref - value_mns
        points_random_value_dico[i][1][ATTRIBUTE_Z_DELTA] = float(value_diff)

        for raster_input in raster_input_dico:
            field_name = raster_input_dico[raster_input][0][0]
            value_other = values_others_dico[raster_input][i]
            points_random_value_dico[i][1][field_name] = float(value_other)

    # STEP 6 : CREATE A POINT VECTOR WITH THE POINT COORDINATES AND THE REFERENCE AND MNS HEIGHTS

    # Remove the points holding error values or falling outside the filtering thresholds
    points_random_value_dico_clean = {}
    for i in range(len(points_random_value_dico)):
        value_ref = points_random_value_dico[i][1][ATTRIBUTE_Z_REF]
        if value_ref != ERROR_VALUE and value_ref > ERROR_MIN_VALUE and value_ref < ERROR_MAX_VALUE:

            points_is_valid = True
            for raster_input in raster_input_dico:
                if len(raster_input_dico[raster_input]) > 1 and len(
                        raster_input_dico[raster_input][1]) > 1:
                    threshold_min = float(
                        raster_input_dico[raster_input][1][0])
                    threshold_max = float(
                        raster_input_dico[raster_input][1][1])
                    field_name = raster_input_dico[raster_input][0][0]
                    value_raster = float(
                        points_random_value_dico[i][1][field_name])
                    if value_raster < threshold_min or value_raster > threshold_max:
                        points_is_valid = False

            if points_is_valid:
                points_random_value_dico_clean[i] = points_random_value_dico[i]

    # Define the attributes of the result file
    attribute_dico = {
        ATTRIBUTE_ID: ogr.OFTInteger,
        ATTRIBUTE_PREC_ALTI: ogr.OFTReal,
        ATTRIBUTE_Z_REF: ogr.OFTReal,
        ATTRIBUTE_Z_MNS: ogr.OFTReal,
        ATTRIBUTE_Z_DELTA: ogr.OFTReal
    }

    for raster_input in raster_input_dico:
        field_name = raster_input_dico[raster_input][0][0]
        attribute_dico[field_name] = ogr.OFTReal

    createPointsFromCoordList(attribute_dico, points_random_value_dico_clean,
                              vector_output_temp, epsg, format_vector)

    # Remove the points at the edge of the study area
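    # ERODE_EDGE_POINTS = -1.0 produces a negative buffer: the study area is
    # shrunk by 1 unit (presumably metres), so the cut below drops every point
    # lying within that distance of the boundary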
    bufferVector(vector_study, vector_study_clean, ERODE_EDGE_POINTS, "", 1.0,
                 10, format_vector)
    cutVectorAll(vector_study_clean, vector_output_temp, vector_output, True,
                 format_vector)

    # STEP 7 : CONVERT THE .DBF FILE TO .CSV
    dbf_file = repertory_output + os.sep + base_name + EXT_DBF
    csv_file = repertory_output + os.sep + base_name + EXT_CSV

    if debug >= 2:
        print(cyan + "estimateQualityMns() : " + bold + green +
              "Converting DBF file %s to CSV file %s" %
              (dbf_file, csv_file) + endC)

    convertDbf2Csv(dbf_file, csv_file)

    # STEP 8 : REMOVE USELESS INTERMEDIATE FILES

    # Remove the intermediate data
    if not save_results_intermediate:
        if cutting_action:
            if os.path.isfile(image_cut):
                removeFile(image_cut)
        else:
            if os.path.isfile(vector_study):
                removeVectorFile(vector_study)

        for raster_input in raster_input_dico:
            raster_cut = raster_cut_dico[raster_input]
            if os.path.isfile(raster_cut):
                removeFile(raster_cut)

        if os.path.isfile(vector_output_temp):
            removeVectorFile(vector_output_temp)

        if os.path.isfile(vector_study_clean):
            removeVectorFile(vector_study_clean)

        if os.path.isfile(vector_sample_temp):
            removeVectorFile(vector_sample_temp)

        if os.path.isfile(vector_sample_temp_clean):
            removeVectorFile(vector_sample_temp_clean)

        # vector_sample_input_cut_list is only defined when the line-vector
        # branch (STEP 3) was taken
        if (vector_sample_points_input is None) or (vector_sample_points_input
                                                    == ""):
            for vector_file in vector_sample_input_cut_list:
                if os.path.isfile(vector_file):
                    removeVectorFile(vector_file)

    print(bold + green + "## END : CREATE HEIGHT POINTS FILE FROM MNSE" + endC)

    # Update the log
    ending_event = "estimateQualityMns() : Masks creation ending : "
    timeLine(path_time_log, ending_event)

    return
Exemplo n.º 26
0
def statisticsVectorRaster(image_input,
                           vector_input,
                           vector_output,
                           band_number,
                           enable_stats_all_count,
                           enable_stats_columns_str,
                           enable_stats_columns_real,
                           col_to_delete_list,
                           col_to_add_list,
                           class_label_dico,
                           path_time_log,
                           clean_small_polygons=False,
                           format_vector='ESRI Shapefile',
                           save_results_intermediate=False,
                           overwrite=True):

    # INITIALISATION
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + endC + "image_input : " +
              str(image_input) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "vector_input : " +
              str(vector_input) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "vector_output : " + str(vector_output) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "band_number : " +
              str(band_number) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "enable_stats_all_count : " + str(enable_stats_all_count) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "enable_stats_columns_str : " + str(enable_stats_columns_str) +
              endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "enable_stats_columns_real : " + str(enable_stats_columns_real) +
              endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "col_to_delete_list : " + str(col_to_delete_list) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "col_to_add_list : " + str(col_to_add_list) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "clean_small_polygons : " + str(clean_small_polygons) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Constants
    PREFIX_AREA_COLUMN = "S_"

    # Update the log
    starting_event = "statisticsVectorRaster() : Compute statistic crossing starting : "
    timeLine(path_time_log, starting_event)

    # Create the output vector file
    if vector_output == "":
        vector_output = vector_input  # Set only for display purposes
    else:
        # Copy vector_input into vector_output
        copyVectorFile(vector_input, vector_output, format_vector)

    # Checks
    image_xmin, image_xmax, image_ymin, image_ymax = getEmpriseImage(
        image_input)
    vector_xmin, vector_xmax, vector_ymin, vector_ymax = getEmpriseFile(
        vector_output, format_vector)
    extension_vector = os.path.splitext(vector_output)[1]

    if round(vector_xmin, 4) < round(image_xmin, 4) or round(
            vector_xmax, 4) > round(image_xmax, 4) or round(
                vector_ymin, 4) < round(image_ymin, 4) or round(
                    vector_ymax, 4) > round(image_ymax, 4):
        print(cyan + "statisticsVectorRaster() : " + bold + red +
              "image_xmin, image_xmax, image_ymin, image_ymax" + endC,
              image_xmin,
              image_xmax,
              image_ymin,
              image_ymax,
              file=sys.stderr)
        print(cyan + "statisticsVectorRaster() : " + bold + red +
              "vector_xmin, vector_xmax, vector_ymin, vector_ymax" + endC,
              vector_xmin,
              vector_xmax,
              vector_ymin,
              vector_ymax,
              file=sys.stderr)
        raise NameError(
            cyan + "statisticsVectorRaster() : " + bold + red +
            "The extent of the vector file (%s) is larger than that of the image file (%s)"
            % (vector_output, image_input) + endC)

    pixel_size = getPixelSizeImage(image_input)
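    # Note: given its uses below (surface = pixel_size * pixel count, and the
    # small-polygon area threshold), pixel_size appears to be the area of one
    # pixel rather than its edge length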

    # Remove the very small polygons that introduce NaN values
    if clean_small_polygons:
        min_size_area = pixel_size * 2
        vector_temp = os.path.splitext(
            vector_output)[0] + "_temp" + extension_vector

        cleanMiniAreaPolygons(vector_output, vector_temp, min_size_area, '',
                              format_vector)
        removeVectorFile(vector_output, format_vector)
        renameVectorFile(vector_temp, vector_output)

    # Get the driver for the shape format
    driver = ogr.GetDriverByName(format_vector)

    # Open the shapefile in read-write mode
    data_source = driver.Open(vector_output,
                              1)  # 0 means read-only - 1 means writeable.
    if data_source is None:
        print(cyan + "statisticsVectorRaster() : " + bold + red +
              "Impossible d'ouvrir le fichier shape : " + vector_output + endC,
              file=sys.stderr)
        sys.exit(1)  # exit with an error code

    # Get the vector layer
    layer = data_source.GetLayer(
        0)  # Get the layer (a layer contains the polygons)
    layer_definition = layer.GetLayerDefn(
    )  # GetLayerDefn => returns the field names of the user defined (created) fields

    # STEP 1/4 : AUTOMATICALLY CREATE THE VALUE DICTIONARY IF IT DOES NOT EXIST
    if enable_stats_all_count and class_label_dico == {}:
        image_values_list = identifyPixelValues(image_input)
        # For every value
        for id_value in image_values_list:
            class_label_dico[id_value] = str(id_value)
        # Remove the nodata value 0
        if 0 in class_label_dico:
            del class_label_dico[0]
    if debug >= 2:
        print(class_label_dico)

    # STEP 2/4 : CREATE THE COLUMNS IN THE SHAPEFILE
    if debug >= 2:
        print(
            cyan + "statisticsVectorRaster() : " + bold + green +
            "STEP 1/3 : STARTING CREATION OF THE COLUMNS IN VECTOR FILE %s"
            % (vector_output) + endC)

    # Input:
    # col_to_add_list = [UniqueID, majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] - "all" expands class_label_dico into as many columns
    # Sub-lists of col_to_add_list, identified to ease later manipulations:
    # col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range]
    # col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S]
    # Build the intermediate lists
    col_to_add_inter01_list = []

    # Values to inject into columns - string format
    if enable_stats_columns_str:
        stats_columns_str_list = ['majority', 'minority']
        for e in stats_columns_str_list:
            col_to_add_list.append(e)

    # Values to inject into columns - numeric format
    if enable_stats_columns_real:
        stats_columns_real_list = [
            'min', 'max', 'mean', 'median', 'sum', 'std', 'unique', 'range'
        ]
        for e in stats_columns_real_list:
            col_to_add_list.append(e)

    # Values to inject into columns - numeric format
    if enable_stats_all_count:
        stats_all_count_list = ['all', 'count']
        for e in stats_all_count_list:
            col_to_add_list.append(e)

    # Values to inject into columns - if class_label_dico is not empty
    if class_label_dico != {}:
        stats_all_count_list = ['all', 'count']
        for e in stats_all_count_list:
            if e not in col_to_add_list:
                col_to_add_list.append(e)

    # Add the columns one by one, in a fixed order
    for col in ("majority", "DateMaj", "SrcMaj", "minority", "min", "max",
                "mean", "median", "sum", "std", "unique", "range"):
        if col in col_to_add_list:
            col_to_add_inter01_list.append(col)

    # Copy col_to_add_inter01_list into col_to_add_inter02_list
    col_to_add_inter02_list = list(col_to_add_inter01_list)

    for col in ("all", "count", "all_S", "count_S"):
        if col in col_to_add_list:
            col_to_add_inter02_list.append(col)
    if "DateMaj" in col_to_add_inter02_list:
        col_to_add_inter02_list.remove("DateMaj")
        col_to_add_inter02_list.insert(0, "majority")
    if "SrcMaj" in col_to_add_inter02_list:
        col_to_add_inter02_list.remove("SrcMaj")
        col_to_add_inter02_list.insert(0, "majority")

    # Values to inject into columns - numeric format
    if enable_stats_all_count:
        stats_all_count_list = ['all_S', 'count_S']
        for e in stats_all_count_list:
            col_to_add_list.append(e)

    # Create the unique-identifier column
    if ("UniqueID" in col_to_add_list) or ("uniqueID" in col_to_add_list) or (
            "ID" in col_to_add_list):
        field_defn = ogr.FieldDefn(
            "ID", ogr.OFTInteger
        )  # Create the field name in the stat_classif_field_defn object
        layer.CreateField(field_defn)
        if debug >= 3:
            print(cyan + "statisticsVectorRaster() : " + endC +
                  "Creating column : ID")

    # Create the columns of col_to_add_inter01_list ([majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range])
    for col in col_to_add_list:
        if layer_definition.GetFieldIndex(
                col
        ) == -1:  # Check whether column col already exists (-1 : it does not)
            if col in ('majority', 'DateMaj', 'SrcMaj',
                       'minority'):  # All the columns filled with strings
                stat_classif_field_defn = ogr.FieldDefn(
                    col, ogr.OFTString
                )  # Create the (string) field in the stat_classif_field_defn object
                layer.CreateField(stat_classif_field_defn)
            elif col in ('mean', 'median', 'sum', 'std', 'unique', 'range',
                         'max', 'min'):
                stat_classif_field_defn = ogr.FieldDefn(
                    col, ogr.OFTReal
                )  # Create the (real) field in the stat_classif_field_defn object
                # Field width
                stat_classif_field_defn.SetWidth(20)
                # Precision of the floating-point field
                stat_classif_field_defn.SetPrecision(2)
                layer.CreateField(stat_classif_field_defn)
            if debug >= 3:
                print(cyan + "statisticsVectorRaster() : " + endC +
                      "Creating column : " + str(col))

    # Create the columns linked to the dictionary
    if ('all' in col_to_add_list) or ('count' in col_to_add_list) or (
            'all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
        for col in class_label_dico:

            # Handle the name of the column matching the class
            name_col = class_label_dico[col]
            if len(name_col) > 10:
                name_col = name_col[:10]
                print(
                    cyan + "statisticsVectorRaster() : " + bold + yellow +
                    "Column name too long. It will be truncated to 10 characters when used: "
                    + endC + name_col)

            # Handle the name of the column matching the class area
            name_col_area = PREFIX_AREA_COLUMN + name_col
            if len(name_col_area) > 10:
                name_col_area = name_col_area[:10]
                if debug >= 3:
                    print(
                        cyan + "statisticsVectorRaster() : " + bold + yellow +
                        "Column name too long. It will be truncated to 10 characters when used: "
                        + endC + name_col_area)

            # Add the columns holding the % distribution of the raster elements
            if ('all' in col_to_add_list) or ('count' in col_to_add_list):
                if layer_definition.GetFieldIndex(
                        name_col
                ) == -1:  # Check whether column name_col already exists (-1 : it does not)
                    stat_classif_field_defn = ogr.FieldDefn(
                        name_col, ogr.OFTReal
                    )  # Create the (real) field in the stat_classif_field_defn object
                    # Field width
                    stat_classif_field_defn.SetWidth(20)
                    # Precision of the floating-point field
                    stat_classif_field_defn.SetPrecision(2)
                    if debug >= 3:
                        print(cyan + "statisticsVectorRaster() : " + endC +
                              "Creating column : " + str(name_col))
                    layer.CreateField(
                        stat_classif_field_defn)  # Add the field

            # Add the columns holding the areas of the raster elements
            if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
                if layer_definition.GetFieldIndex(
                        name_col_area
                ) == -1:  # Check whether column name_col_area already exists (-1 : it does not)
                    stat_classif_field_defn = ogr.FieldDefn(
                        name_col_area, ogr.OFTReal
                    )  # Create the field name in the stat_classif_field_defn object
                    # Field width
                    stat_classif_field_defn.SetWidth(20)
                    # Precision of the floating-point field
                    stat_classif_field_defn.SetPrecision(2)

                    if debug >= 3:
                        print(cyan + "statisticsVectorRaster() : " + endC +
                              "Creating column : " + str(name_col_area))
                    layer.CreateField(
                        stat_classif_field_defn)  # Add the field

    if debug >= 2:
        print(
            cyan + "statisticsVectorRaster() : " + bold + green +
            "STEP 1/3 : FINISHED CREATING THE COLUMNS IN VECTOR FILE %s"
            % (vector_output) + endC)

    # STEP 3/4 : FILL THE VECTOR COLUMNS
    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "STEP 2/3 : STARTING TO FILL THE VECTOR COLUMNS " +
              endC)

    # Compute the statistics col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] crossing raster image / vector
    # Uses the rasterstats library
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "Computing statistics " + endC +
              "Stats : %s - Vector : %s - Raster : %s" %
              (col_to_add_inter02_list, vector_output, image_input) + endC)
    stats_info_list = raster_stats(vector_output,
                                   image_input,
                                   band_num=band_number,
                                   stats=col_to_add_inter02_list)
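    # Illustrative shape of one stats_info_list element, inferred from the
    # accesses below (values are made up):
    #   {'__fid__': 12, 'count': 232, 'majority': 803,
    #    'all': [(0, 183), (803, 45), (801, 4)], ...}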

    # Count the number of polygons
    num_features = layer.GetFeatureCount()
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "Filling the columns polygon by polygon " + endC)
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + endC +
              "Total number of polygons : " + str(num_features))

    polygone_count = 0

    for polygone_stats in stats_info_list:  # For each polygon represented in stats_info_list - there are as many polygons as in the vector file

        # Extract the feature
        feature = layer.GetFeature(polygone_stats['__fid__'])

        polygone_count = polygone_count + 1

        if debug >= 3 and polygone_count % 10000 == 0:
            print(cyan + "statisticsVectorRaster() : " + endC +
                  "Progress : %s polygons processed out of %s" %
                  (polygone_count, num_features))
        if debug >= 5:
            print(
                cyan + "statisticsVectorRaster() : " + endC +
                "Processing polygon : ",
                stats_info_list.index(polygone_stats) + 1)

        # Fill the unique identifier
        if ("UniqueID" in col_to_add_list) or (
                "uniqueID" in col_to_add_list) or ("ID" in col_to_add_list):
            feature.SetField('ID', int(stats_info_list.index(polygone_stats)))

        # Initialize to 0 the columns holding the class distribution % - check what happens when the name exceeds 10 characters
        if ('all' in col_to_add_list) or ('count' in col_to_add_list):
            for element in class_label_dico:
                name_col = class_label_dico[element]
                if len(name_col) > 10:
                    name_col = name_col[:10]
                feature.SetField(name_col, 0)

        # Initialize to 0 the columns holding the class area - check what happens when the name exceeds 10 characters
        if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
            for element in class_label_dico:
                name_col = class_label_dico[element]
                name_col_area = PREFIX_AREA_COLUMN + name_col
                if len(name_col_area) > 10:
                    name_col_area = name_col_area[:10]
                feature.SetField(name_col_area, 0)

        # Fill the columns holding the % distribution and the area of the classes
        if ('all' in col_to_add_list) or ('count' in col_to_add_list) or (
                'all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
            # 'all' is a list of couples : (pixel_value_on_the_raster, number_of_pixels_with_that_value) for the observed polygon.
            # E.g. [(0,183),(803,45),(801,4)] : in the polygon there are 183 pixels of value 0, 45 pixels of value 803 and 4 pixels of value 801
            majority_all = polygone_stats['all']

            # Two pixel values may refer to the same column. For instance, pixels at 201, 202, 203 may all correspond to the BD Topo
            # Group the elements of majority_all going into the same column according to class_label_dico
            count_for_idx_couple = 0  # Count of the modifications (couple removals) of majority_all, used to adjust the index while iterating

            for idx_couple in range(
                    1, len(majority_all)
            ):  # No need to process the first element (idx_couple == 0)

                idx_couple = idx_couple - count_for_idx_couple  # Account for the couples removed while iterating over majority_all
                couple = majority_all[idx_couple]  # E.g. couple = (803,45)

                if (couple is None) or (
                        couple == ""
                ):  # In case of a rasterstats bug (e.g. geometric error of the polygon)
                    if debug >= 3:
                        print(
                            cyan + "statisticsVectorRaster() : " + bold + red +
                            "Problem detected while handling polygon %s" %
                            (polygone_count) + endC,
                            file=sys.stderr)
                    pass
                else:
                    for idx_verif in range(idx_couple):
                        # Check against the elements already seen in majority_all
                        # Case where the name matching the label has already been met in majority_all
                        # Check that the image pixel values are referenced in the dictionary
                        if couple[0] in class_label_dico:

                            if class_label_dico[couple[0]] == class_label_dico[
                                    majority_all[idx_verif][0]]:
                                majority_all[idx_verif] = (
                                    majority_all[idx_verif][0],
                                    majority_all[idx_verif][1] + couple[1]
                                )  # Add the matching pixel count to the earlier couple
                                majority_all.remove(
                                    couple
                                )  # Remove the couple holding the "duplicate"
                                count_for_idx_couple = count_for_idx_couple + 1  # Update the modification count
                                break
                        else:
                            raise NameError(
                                cyan + "statisticsVectorRaster() : " + bold +
                                red +
                                "The image file (%s) contains pixel value '%d' not identified in class_label_dico"
                                % (image_input, couple[0]) + endC)

            # Inject the values of majority_all into the columns
            for couple_value_count in majority_all:  # Iterate over majority_all. E.g. couple_value_count = (803,45)
                if (couple_value_count is None) or (
                        couple_value_count == ""
                ):  # In case of a rasterstats bug (e.g. geometric error of the polygon)
                    if debug >= 3:
                        print(
                            cyan + "statisticsVectorRaster() : " + bold + red +
                            "Problem detected while handling polygon %s" %
                            (polygone_count) + endC,
                            file=sys.stderr)
                    pass
                else:
                    nb_pixel_total = polygone_stats[
                        'count']  # Number of pixels in the polygon
                    pixel_value = couple_value_count[0]  # Pixel value
                    value_count = couple_value_count[
                        1]  # Number of pixels with that value
                    name_col = class_label_dico[
                        pixel_value]  # Turn the pixel value into its "meaning" according to the dictionary. E.g. BD Topo or 2011
                    name_col_area = PREFIX_AREA_COLUMN + name_col  # Name of the matching area column

                    if len(name_col) > 10:
                        name_col = name_col[:10]
                    if len(name_col_area) > 10:
                        name_col_area = name_col_area[:10]

                    value_area = pixel_size * value_count  # Area of the polygon part matching the pixel value
                    if nb_pixel_total is not None and nb_pixel_total != 0:
                        percentage = (
                            float(value_count) / float(nb_pixel_total)
                        ) * 100  # Convert the count to a percentage
                    else:
                        if debug >= 3:
                            print(
                                cyan + "statisticsVectorRaster() : " + bold +
                                red +
                                "Problem identifying the pixel count of polygon %s : the percentage of %s is set to 0"
                                % (polygone_count, name_col) + endC,
                                file=sys.stderr)
                        percentage = 0.0

                    if ('all' in col_to_add_list) or ('count'
                                                      in col_to_add_list):
                        feature.SetField(
                            name_col, percentage
                        )  # Inject the percentage into the matching column
                    if ('all_S' in col_to_add_list) or ('count_S'
                                                        in col_to_add_list):
                        feature.SetField(
                            name_col_area, value_area
                        )  # Inject the area into the matching column
        else:
            pass

        # Fill the requested statistics columns ( col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range] )
        for stats in col_to_add_inter01_list:

            if stats == 'DateMaj' or stats == 'SrcMaj':  # Special case for 'DateMaj' and 'SrcMaj' : the column name is DateMaj or SrcMaj, but the statistic used is identified by majority
                name_col = stats  # Column name. E.g. 'DateMaj'
                value_statis = polygone_stats[
                    'majority']  # Majority value. E.g. '203'
                if value_statis is None:
                    value_statis_class = 'nan'
                else:
                    value_statis_class = class_label_dico[
                        value_statis]  # Turn the value into its dictionary meaning. E.g. '2011'
                feature.SetField(name_col,
                                 value_statis_class)  # Add into the column

            elif (stats is None) or (stats == "") or (
                    polygone_stats[stats] is
                    None) or (polygone_stats[stats]) == "" or (
                        polygone_stats[stats]) == 'nan':
                # In case of a rasterstats bug (e.g. geometric error of the polygon)
                pass

            else:
                name_col = stats  # Column name. E.g. 'majority', 'max'
                value_statis = polygone_stats[
                    stats]  # Value to bind to the column, e.g. '2011'

                if (
                        name_col == 'majority' or name_col == 'minority'
                ) and class_label_dico != {}:  # Case where the column refers to a dictionary value (class_label_dico is a dict, so compare with {})
                    value_statis_class = class_label_dico[value_statis]
                else:
                    value_statis_class = value_statis

                feature.SetField(name_col, value_statis_class)

        layer.SetFeature(feature)
        feature.Destroy()

    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "STEP 2/3 : FINISHED FILLING THE COLUMNS OF VECTOR %s" %
              (vector_output) + endC)

    # STEP 4/4 : REMOVE THE UNWANTED COLUMNS
    if col_to_delete_list != []:

        if debug >= 2:
            print(cyan + "statisticsVectorRaster() : " + bold + green +
                  "STEP 3/3 : STARTING REMOVAL OF COLUMNS %s" %
                  (col_to_delete_list) + endC)

        for col_to_delete in col_to_delete_list:

            if layer_definition.GetFieldIndex(
                    col_to_delete
            ) != -1:  # Check whether the column exists (-1 : it does not)

                layer.DeleteField(layer_definition.GetFieldIndex(
                    col_to_delete))  # Remove the column

                if debug >= 3:
                    print(cyan + "statisticsVectorRaster() : " + endC +
                          "Removing %s" % (col_to_delete) + endC)

        if debug >= 2:
            print(cyan + "statisticsVectorRaster() : " + bold + green +
                  "ETAPE 3/3 : FIN DE LA SUPPRESSION DES COLONNES" + endC)

    else:
        print(cyan + "statisticsVectorRaster() : " + bold + yellow +
              "ETAPE 3/3 : AUCUNE SUPPRESSION DE COLONNE DEMANDEE" + endC)

    # Fermeture du fichier shape
    layer.SyncToDisk()
    layer = None
    data_source.Destroy()

    # Mise à jour du Log
    ending_event = "statisticsVectorRaster() : Compute statistic crossing ending : "
    timeLine(path_time_log, ending_event)

    return
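# --- Hedged example (illustrative, not part of the original library) --------
# Minimal sketch of the zonal-statistics pattern statisticsVectorRaster() relies
# on above: compute one statistic per polygon with rasterstats, then write it
# back into the vector with OGR. The paths and the field name are assumptions;
# it also assumes rasterstats and the OGR layer iterate the features in the
# same order, which holds for shapefiles read sequentially.
def exampleZonalStatsToField(vector_path, raster_path, field_name='majority'):
    from osgeo import ogr
    from rasterstats import zonal_stats

    stats_list = zonal_stats(vector_path, raster_path, stats=['majority'])  # One dict per feature
    data_source = ogr.Open(vector_path, 1)  # Open the vector in update mode
    layer = data_source.GetLayer()
    if layer.GetLayerDefn().GetFieldIndex(field_name) == -1:  # Create the field if missing
        layer.CreateField(ogr.FieldDefn(field_name, ogr.OFTReal))
    for feature, stats in zip(layer, stats_list):
        value = stats.get('majority')
        if value is not None:
            feature.SetField(field_name, float(value))
            layer.SetFeature(feature)
    data_source = None  # Close and flush to disk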
def prepareData(input_buffer_tdc, input_paysage, output_dir, input_repertories_list, id_paysage, id_name_sub_rep, epsg, optimization_zone, no_cover, zone_date, separ_name, pos_date, nb_char_date, separ_date, path_time_log, format_raster='GTiff', format_vector="ESRI Shapefile", extension_raster=".tif", extension_vector=".shp", save_results_intermediate=True, overwrite=True):
    # Mise à jour du Log
    starting_event = "prepareData() : Select prepare data starting : "
    timeLine(path_time_log,starting_event)

    # Affichage des paramètres
    if debug >= 3:
        print(bold + green + "Variables dans la fonction prepareData - Variables générales" + endC)
        print(cyan + "prepareData() : " + endC + "input_buffer_tdc : " + str(input_buffer_tdc) + endC)
        print(cyan + "prepareData() : " + endC + "input_paysage : " + str(input_paysage) + endC)
        print(cyan + "prepareData() : " + endC + "output_dir : " + str(output_dir) + endC)
        print(cyan + "prepareData() : " + endC + "input_repertories_list : " + str(input_repertories_list) + endC)
        print(cyan + "prepareData() : " + endC + "id_paysage : " + str(id_paysage) + endC)
        print(cyan + "prepareData() : " + endC + "id_name_sub_rep : " + str(id_name_sub_rep) + endC)
        print(cyan + "prepareData() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "prepareData() : " + endC + "optimization_zone : " + str(optimization_zone) + endC)
        print(cyan + "prepareData() : " + endC + "no_cover : " + str(no_cover) + endC)
        print(cyan + "prepareData() : " + endC + "zone_date : " + str(zone_date) + endC)
        print(cyan + "prepareData() : " + endC + "separ_name : " + str(separ_name) + endC)
        print(cyan + "prepareData() : " + endC + "pos_date : " + str(pos_date) + endC)
        print(cyan + "prepareData() : " + endC + "nb_char_date : " + str(nb_char_date) + endC)
        print(cyan + "prepareData() : " + endC + "separ_date : " + str(separ_date) + endC)
        print(cyan + "prepareData() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "prepareData() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "prepareData() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "prepareData() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "prepareData() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "prepareData() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "prepareData() : " + endC + "overwrite : " + str(overwrite) + endC)

    REPERTORY_PAYSAGES = "Paysages"
    REPERTORY_IMAGES = "Images"
    ID_P = "id_p"

    SUFFIX_OPTI = "_opti"
    SUFFIX_CUT = "_cut"
    SUFFIX_ERROR = "_error"
    SUFFIX_MERGE = "_merge"
    SUFFIX_CLEAN = "_clean"
    SUFFIX_STACK = "_stack"

    output_dir_paysages = output_dir + os.sep + REPERTORY_PAYSAGES
    output_dir_images = output_dir + os.sep + REPERTORY_IMAGES

    # Création du répertoire de sortie s'il n'existe pas déjà
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Création du répertoire de sortie pour les paysages s'il n'existe pas
    if not os.path.exists(output_dir_paysages):
        os.makedirs(output_dir_paysages)

    # Création du répertoire de sortie pour les images s'il n'existe pas
    if not os.path.exists(output_dir_images):
        os.makedirs(output_dir_images)

    # Recuperer l'epsg du fichier d'emprise
    if epsg == 0 :
        epsg = getProjection(input_paysage, format_vector)

    # Création du paysage optimal
    optimPaysage(input_buffer_tdc, input_paysage, optimization_zone, SUFFIX_OPTI, output_dir_paysages, id_paysage, format_vector)

    # Create one shapefile per polygon
    paysage_opti = output_dir_paysages + os.sep + os.path.splitext(os.path.basename(input_paysage))[0] + SUFFIX_OPTI + os.path.splitext(input_paysage)[1]
    if id_paysage != "" :
        paysages_list = splitVector(paysage_opti, str(output_dir_paysages), str(id_paysage), epsg, format_vector, extension_vector)
    else:
        paysages_list = splitVector(paysage_opti, str(output_dir_paysages), ID_P, epsg, format_vector, extension_vector)

    if debug >= 3:
        print(cyan + "PrepareData() : " + endC + "Liste des fichiers en entrée de imagesAssembly() : " + str(paysages_list))


    # Assemble the images within the optimised landscapes
    # Case where no overlap between images is requested
    if no_cover:

        # Récupération des noms de sortie
        image_output_list = []
        id_paysage_list = []
        for shape in paysages_list :
            attribute_name_dico = {}
            attribute_name_dico[id_name_sub_rep] = ogr.OFTString
            attribute_name_dico[id_paysage] = ogr.OFTInteger
            res_values_dico = getAttributeValues(shape, None, None, attribute_name_dico, format_vector)
            id_name_sub_rep_value = res_values_dico[id_name_sub_rep][0]
            id_paysage_value = res_values_dico[id_paysage][0]
            image_output_list.append(id_name_sub_rep_value)
            id_paysage_list.append(id_paysage_value)
        if debug >= 3:
            print("image_output_list " + str(image_output_list))

        # Récupération de tous les (sous-)répertoires
        repertory_images_sources_list_temp = []
        for input_dir in input_repertories_list:
            sub_rep_list = getSubRepRecursifList(input_dir)
            if sub_rep_list != []:
                for sub_rep in sub_rep_list:
                    repertory_images_sources_list_temp.append(sub_rep)
            else:
                repertory_images_sources_list_temp.append(input_dir)
        # On réorganise pour avoir le même ordre dans les 2 listes 'repertory_images_sources_list' et 'image_output_list'
        repertory_images_sources_list = []
        for paysage in id_paysage_list:
            for repertory_images_source in repertory_images_sources_list_temp:
                if str(paysage) in repertory_images_source.split(os.sep)[-1]:
                    repertory_images_sources_list.append(repertory_images_source)
        if debug >= 3:
            print("repertory_images_sources_list " + str(repertory_images_sources_list))

        if len(repertory_images_sources_list) != len(image_output_list):
            raise Exception(bold + red + "Error: the number of input directories does not match the number of output files." + endC)
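        # Example: with id_paysage_list = [1, 2], directories such as '/data/images/zone1'
        # and '/data/images/zone2' are matched above by testing str(paysage) against the
        # last component of each directory path (hypothetical paths, for illustration).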

        # Commande ImagesAssembly sur les éléments des 2 listes
        for i in range(len(paysages_list)):
            image_output = output_dir_images + os.sep + image_output_list[i]
            if debug >= 3:
                 print(cyan + "PrepareData() : " + endC + bold + green  + "image_output : " + endC + image_output)

            try:
                selectAssembyImagesByHold(paysages_list[i], [repertory_images_sources_list[i]], image_output, False, zone_date, epsg, False, False, False, False, 0, 0, 0, 0, separ_name, pos_date, nb_char_date, separ_date, path_time_log, SUFFIX_ERROR, SUFFIX_MERGE, SUFFIX_CLEAN, SUFFIX_STACK, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
            except Exception:
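                # Assembly errors on a single landscape are swallowed so the remaining landscapes are still processed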
                pass

    else:
        for shape in paysages_list :
            # ~ filename = os.path.basename(shape)
            # ~ info_name = filename.split(separ_name, 2)[POS]
            # ~ name_shape = info_name[:NB_CHAR]
            # ~ image_output = output_dir_images + os.sep + name_shape + SUFFIX_ASS + extension_raster

        # ~ for shape in sub_repertory_images_paysages_list :
            image_output = output_dir_images + os.sep + os.path.splitext(os.path.basename(shape)) + extension_raster

            if optimization_zone :
                shape_cut = os.path.splitext(shape)[0] + SUFFIX_CUT + os.path.splitext(shape)[1]
                cutVector(input_buffer_tdc, shape, shape_cut, overwrite, format_vector)
            else :
                shape_cut = shape

            selectAssembyImagesByHold(shape_cut, input_repertories_list, image_output, False, zone_date, epsg, False, False, False, False, 0, 0, 0, 0, separ_name, pos_date, nb_char_date, separ_date, path_time_log, SUFFIX_ERROR, SUFFIX_MERGE, SUFFIX_CLEAN, SUFFIX_STACK, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
            if optimization_zone and os.path.exists(shape_cut):
                removeVectorFile(shape_cut, format_vector)

    # Mise à jour du Log
    ending_event = "prepareData() : Select prepare data ending : "
    timeLine(path_time_log,ending_event)

    if debug >= 3:
        print(cyan + "PrepareData() : " + endC + bold + green  + "Fin de traitement")
    return
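# --- Hedged usage sketch (illustrative, not from the original project) ------
# Hypothetical call to prepareData(); every path and naming parameter below is
# an assumption chosen for illustration:
#
#   prepareData("/tmp/buffer_tdc.shp", "/tmp/paysages.shp", "/tmp/output",
#               ["/data/images"], "id_p", "sub_rep", 2154,
#               optimization_zone=True, no_cover=False, zone_date=True,
#               separ_name="_", pos_date=2, nb_char_date=8, separ_date="",
#               path_time_log="/tmp/time.log")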
def classesOfWaterHeights(input_flooded_areas_vector,
                          input_digital_elevation_model_file,
                          output_heights_classes_file,
                          output_heights_classes_vector,
                          heights_classes='0,0.5,1,1.5,2',
                          epsg=2154,
                          no_data_value=0,
                          format_raster='GTiff',
                          format_vector='ESRI Shapefile',
                          extension_raster='.tif',
                          extension_vector='.shp',
                          grass_gisbase=os.environ['GISBASE'],
                          grass_gisdb='GRASS_database',
                          grass_location='LOCATION',
                          grass_mapset='MAPSET',
                          path_time_log='',
                          save_results_intermediate=False,
                          overwrite=True):

    if debug >= 3:
        print('\n' + bold + green +
              "Classes de hauteurs d'eau - Variables dans la fonction :" +
              endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "input_flooded_areas_vector : " +
              str(input_flooded_areas_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "input_digital_elevation_model_file : " +
              str(input_digital_elevation_model_file) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "output_heights_classes_file : " +
              str(output_heights_classes_file) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "output_heights_classes_vector : " +
              str(output_heights_classes_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "heights_classes : " + str(heights_classes) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_gisbase : " + str(grass_gisbase) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_gisdb : " + str(grass_gisdb) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_location : " + str(grass_location) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_mapset : " + str(grass_mapset) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "    classesOfWaterHeights() : " + endC + "overwrite : " +
              str(overwrite) + endC + '\n')

    # Définition des constantes
    ENCODING_RASTER_FLOAT = 'float'
    ENCODING_RASTER_UINT8 = 'uint8'
    EXTENSION_RASTER_SAGA = '.sdat'
    FORMAT_VECTOR_GRASS = format_vector.replace(' ', '_')
    SUFFIX_TEMP = '_temp'
    SUFFIX_LINES = '_lines'
    SUFFIX_POINTS = '_points'
    SUFFIX_ALTI = '_altitude'
    SUFFIX_CUT = '_cut'
    SUFFIX_RAW = '_raw_heights'
    INDEX_FIELD = 'idx'
    ALTI_FIELD = 'altitude'
    VECTORISATION = 'GRASS'

    # Mise à jour du log
    starting_event = "classesOfWaterHeights() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Définition des variables 'basename'
    flooded_areas_basename = os.path.splitext(
        os.path.basename(input_flooded_areas_vector))[0]
    digital_elevation_model_basename = os.path.splitext(
        os.path.basename(input_digital_elevation_model_file))[0]
    flooded_areas_lines_basename = flooded_areas_basename + SUFFIX_LINES
    flooded_areas_points_basename = flooded_areas_basename + SUFFIX_POINTS
    if output_heights_classes_file != "":
        output_heights_classes_basename = os.path.splitext(
            os.path.basename(output_heights_classes_file))[0]
        output_dirname = os.path.dirname(output_heights_classes_file)
    else:
        output_heights_classes_basename = os.path.splitext(
            os.path.basename(output_heights_classes_vector))[0]
        output_dirname = os.path.dirname(output_heights_classes_vector)

    # Définition des variables temp
    temp_directory = output_dirname + os.sep + output_heights_classes_basename + SUFFIX_TEMP
    flooded_areas_lines = temp_directory + os.sep + flooded_areas_lines_basename + extension_vector
    flooded_areas_points = temp_directory + os.sep + flooded_areas_points_basename + extension_vector
    altitude_points = temp_directory + os.sep + flooded_areas_points_basename + SUFFIX_ALTI + extension_vector
    altitude_grid = temp_directory + os.sep + flooded_areas_basename + SUFFIX_ALTI + EXTENSION_RASTER_SAGA
    altitude_file = temp_directory + os.sep + flooded_areas_basename + SUFFIX_ALTI + SUFFIX_CUT + extension_raster
    digital_elevation_model_cut = temp_directory + os.sep + digital_elevation_model_basename + SUFFIX_CUT + extension_raster
    raw_heights = temp_directory + os.sep + flooded_areas_basename + SUFFIX_RAW + extension_raster
    heights_classes_temp = temp_directory + os.sep + output_heights_classes_basename + extension_raster
    if output_heights_classes_file == "":
        output_heights_classes_file = output_dirname + os.sep + output_heights_classes_basename + extension_raster

    # Nettoyage des traitements précédents
    if debug >= 3:
        print(cyan + "classesOfWaterHeights() : " + endC +
              "Nettoyage des traitements précédents." + endC + '\n')
    removeFile(output_heights_classes_file)
    removeVectorFile(output_heights_classes_vector,
                     format_vector=format_vector)
    cleanTempData(temp_directory)

    #############
    # Etape 0/6 # Préparation des traitements
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 0/6 - Début de la préparation des traitements." + endC + '\n')

    # Préparation de GRASS
    xmin, xmax, ymin, ymax = getEmpriseImage(
        input_digital_elevation_model_file)
    pixel_width, pixel_height = getPixelWidthXYImage(
        input_digital_elevation_model_file)
    grass_gisbase, grass_gisdb, grass_location, grass_mapset = initializeGrass(
        temp_directory,
        xmin,
        xmax,
        ymin,
        ymax,
        pixel_width,
        pixel_height,
        projection=epsg,
        gisbase=grass_gisbase,
        gisdb=grass_gisdb,
        location=grass_location,
        mapset=grass_mapset,
        clean_old=True,
        overwrite=overwrite)

    # Gestion des classes de hauteurs d'eau
    thresholds_list = heights_classes.split(',')
    thresholds_list_float = [float(x) for x in thresholds_list]
    thresholds_list_float.sort()
    thresholds_list_float_len = len(thresholds_list_float)
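    # Example: heights_classes='0,0.5,1,1.5,2' yields thresholds_list_float = [0.0, 0.5, 1.0, 1.5, 2.0],
    # i.e. four [min, max) intervals (classes 1 to 4) plus a final '>= 2.0' class (class 5); everything else is 0.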

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 0/6 - Fin de la préparation des traitements." + endC + '\n')

    #############
    # Etape 1/6 # Création de points sur le périmètre de l'emprise inondée
    #############

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 1/6 - Début de la création de points sur le périmètre de l'emprise inondée."
        + endC + '\n')

    # Conversion de l'emprise inondée en polylignes
    convertePolygon2Polylines(input_flooded_areas_vector,
                              flooded_areas_lines,
                              overwrite=overwrite,
                              format_vector=format_vector)

    # Création de points le long du polyligne
    use = 'vertex'
    dmax = 10
    percent = False
    importVectorOgr2Grass(flooded_areas_lines,
                          flooded_areas_lines_basename,
                          overwrite=overwrite)
    pointsAlongPolylines(flooded_areas_lines_basename,
                         flooded_areas_points_basename,
                         use=use,
                         dmax=dmax,
                         percent=percent,
                         overwrite=overwrite)
    exportVectorOgr2Grass(flooded_areas_points_basename,
                          flooded_areas_points,
                          format_vector=FORMAT_VECTOR_GRASS,
                          overwrite=overwrite)

    # Ajout d'un index sur les points
    addNewFieldVector(flooded_areas_points,
                      INDEX_FIELD,
                      ogr.OFTInteger,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)
    updateIndexVector(flooded_areas_points,
                      index_name=INDEX_FIELD,
                      format_vector=format_vector)

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 1/6 - Fin de la création de points sur le périmètre de l'emprise inondée."
        + endC + '\n')

    #############
    # Etape 2/6 # Récupération de l'altitude sous chaque point
    #############

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 2/6 - Début de la récupération de l'altitude sous chaque point."
        + endC + '\n')

    # Ajout d'un champ pour récupérer l'altitude
    addNewFieldVector(flooded_areas_points,
                      ALTI_FIELD,
                      ogr.OFTReal,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)

    # Echantillonnage du MNT sous le fichier points
    importVectorOgr2Grass(flooded_areas_points,
                          flooded_areas_points_basename,
                          overwrite=overwrite)
    importRasterGdal2Grass(input_digital_elevation_model_file,
                           digital_elevation_model_basename,
                           overwrite=overwrite)
    sampleRasterUnderPoints(flooded_areas_points_basename,
                            digital_elevation_model_basename,
                            ALTI_FIELD,
                            overwrite=overwrite)
    exportVectorOgr2Grass(flooded_areas_points_basename,
                          altitude_points,
                          format_vector=FORMAT_VECTOR_GRASS,
                          overwrite=overwrite)

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 2/6 - Fin de la récupération de l'altitude sous chaque point." +
        endC + '\n')

    #############
    # Etape 3/6 # Triangulation de l'altitude
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 3/6 - Début de la triangulation de l'altitude." + endC + '\n')

    pixel_size = abs(min(pixel_width, pixel_height))
    triangulationDelaunay(altitude_points,
                          altitude_grid,
                          ALTI_FIELD,
                          cellsize=pixel_size)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 3/6 - Fin de la triangulation de l'altitude." + endC + '\n')

    #############
    # Etape 4/6 # Calcul des hauteurs brutes
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 4/6 - Début du calcul des hauteurs brutes." + endC + '\n')

    # Redécoupage sur l'emprise inondée
    cutRasterImages([altitude_grid, input_digital_elevation_model_file],
                    input_flooded_areas_vector,
                    [altitude_file, digital_elevation_model_cut],
                    0,
                    0,
                    epsg,
                    no_data_value,
                    "",
                    False,
                    path_time_log,
                    format_raster=format_raster,
                    format_vector=format_vector,
                    extension_raster=extension_raster,
                    extension_vector=extension_vector,
                    save_results_intermediate=save_results_intermediate,
                    overwrite=overwrite)

    # BandMath pour les hauteurs brutes (triangulation - MNT)
    expression = "im1b1 - im2b1"
    rasterCalculator([altitude_file, digital_elevation_model_cut],
                     raw_heights,
                     expression,
                     codage=ENCODING_RASTER_FLOAT)
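    # Assuming rasterCalculator() wraps OTB BandMath, the equivalent standalone
    # command would look like:
    #   otbcli_BandMath -il <altitude_file> <digital_elevation_model_cut> -out <raw_heights> float -exp "im1b1 - im2b1"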

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 4/6 - Fin du calcul des hauteurs brutes." + endC + '\n')

    #############
    # Etape 5/6 # Attribution des classes de hauteurs d'eau
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 5/6 - Début de l'attribution des classes de hauteurs d'eau." +
          endC + '\n')

    # Génération de l'expression
    expression = ""
    for i in range(thresholds_list_float_len - 1):
        min_threshold = thresholds_list_float[i]
        max_threshold = thresholds_list_float[i + 1]
        expression += "im1b1>=%s and im1b1<%s ? %s : " % (min_threshold,
                                                          max_threshold, i + 1)
    expression += "im1b1>=%s ? %s : 0" % (thresholds_list_float[
        thresholds_list_float_len - 1], thresholds_list_float_len)
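    # Example: with the default classes '0,0.5,1,1.5,2', the generated expression is
    #   "im1b1>=0.0 and im1b1<0.5 ? 1 : im1b1>=0.5 and im1b1<1.0 ? 2 : im1b1>=1.0 and im1b1<1.5 ? 3 : im1b1>=1.5 and im1b1<2.0 ? 4 : im1b1>=2.0 ? 5 : 0"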

    # Calcul des classes de hauteurs d'eau
    rasterCalculator([raw_heights],
                     heights_classes_temp,
                     expression,
                     codage=ENCODING_RASTER_UINT8)

    # Redécoupage propre des zones en dehors de l'emprise inondée
    cutImageByVector(input_flooded_areas_vector,
                     heights_classes_temp,
                     output_heights_classes_file,
                     pixel_size_x=pixel_width,
                     pixel_size_y=pixel_height,
                     no_data_value=no_data_value,
                     epsg=epsg,
                     format_raster=format_raster,
                     format_vector=format_vector)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 5/6 - Fin de l'attribution des classes de hauteurs d'eau." +
          endC + '\n')

    #############
    # Etape 6/6 # Vectorisation des classes de hauteurs d'eau
    #############

    if output_heights_classes_vector != "":

        print(
            cyan + "classesOfWaterHeights() : " + bold + green +
            "ETAPE 6/6 - Début de la vectorisation des classes de hauteurs d'eau."
            + endC + '\n')

        name_column = 'class'
        umc_list = 0

        if VECTORISATION == 'GRASS':
            vectorizeGrassClassification(
                output_heights_classes_file,
                output_heights_classes_vector,
                name_column, [umc_list],
                False,
                True,
                True,
                input_flooded_areas_vector,
                True,
                path_time_log,
                expression="",
                format_vector=format_vector,
                extension_raster=extension_raster,
                extension_vector=extension_vector,
                save_results_intermediate=save_results_intermediate,
                overwrite=overwrite)
        else:
            vectorizeClassification(
                output_heights_classes_file,
                output_heights_classes_vector,
                name_column, [umc_list],
                2000,
                False,
                True,
                True,
                True,
                True,
                True,
                input_flooded_areas_vector,
                True,
                False,
                False, [0],
                path_time_log,
                expression="",
                format_vector=format_vector,
                extension_raster=extension_raster,
                extension_vector=extension_vector,
                save_results_intermediate=save_results_intermediate,
                overwrite=overwrite)

        print(
            cyan + "classesOfWaterHeights() : " + bold + green +
            "ETAPE 6/6 - Fin de la vectorisation des classes de hauteurs d'eau."
            + endC + '\n')

    else:
        print(
            cyan + "classesOfWaterHeights() : " + bold + yellow +
            "ETAPE 6/6 - Pas de vectorisation des classes de hauteurs d'eau demandée."
            + endC + '\n')

    # Suppression des fichiers temporaires
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "classesOfWaterHeights() : " + endC +
                  "Suppression des fichiers temporaires." + endC + '\n')
        deleteDir(temp_directory)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "FIN DES TRAITEMENTS" + endC + '\n')

    # Mise à jour du log
    ending_event = "classesOfWaterHeights() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return
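# --- Hedged usage sketch (illustrative, not from the original project) ------
# Hypothetical call to classesOfWaterHeights(); the paths are assumptions.
# Note that grass_gisbase defaults to os.environ['GISBASE'], so GRASS must be
# configured in the environment before this module is even imported:
#
#   classesOfWaterHeights("/tmp/flooded_areas.shp", "/tmp/mnt.tif",
#                         "/tmp/heights_classes.tif", "/tmp/heights_classes.shp",
#                         heights_classes='0,0.5,1,1.5,2', epsg=2154,
#                         path_time_log="/tmp/time.log")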
def aspectRatio(grid_input,
                grid_output,
                roads_input,
                built_input,
                seg_dist,
                seg_length,
                buffer_size,
                epsg,
                project_encoding,
                server_postgis,
                port_number,
                user_postgis,
                password_postgis,
                database_postgis,
                schema_postgis,
                path_time_log,
                format_vector='ESRI Shapefile',
                extension_vector=".shp",
                save_results_intermediate=False,
                overwrite=True):

    print(bold + yellow + "Début du calcul de l'indicateur Aspect Ratio." +
          endC + "\n")
    timeLine(path_time_log, "Début du calcul de l'indicateur Aspect Ratio : ")

    if debug >= 3:
        print(bold + green + "aspectRatio() : Variables dans la fonction" +
              endC)
        print(cyan + "aspectRatio() : " + endC + "grid_input : " +
              str(grid_input) + endC)
        print(cyan + "aspectRatio() : " + endC + "grid_output : " +
              str(grid_output) + endC)
        print(cyan + "aspectRatio() : " + endC + "roads_input : " +
              str(roads_input) + endC)
        print(cyan + "aspectRatio() : " + endC + "built_input : " +
              str(built_input) + endC)
        print(cyan + "aspectRatio() : " + endC + "seg_dist : " +
              str(seg_dist) + endC)
        print(cyan + "aspectRatio() : " + endC + "seg_length : " +
              str(seg_length) + endC)
        print(cyan + "aspectRatio() : " + endC + "buffer_size : " +
              str(buffer_size) + endC)
        print(cyan + "aspectRatio() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "aspectRatio() : " + endC + "project_encoding : " +
              str(project_encoding) + endC)
        print(cyan + "aspectRatio() : " + endC + "server_postgis : " +
              str(server_postgis) + endC)
        print(cyan + "aspectRatio() : " + endC + "port_number : " +
              str(port_number) + endC)
        print(cyan + "aspectRatio() : " + endC + "user_postgis : " +
              str(user_postgis) + endC)
        print(cyan + "aspectRatio() : " + endC + "password_postgis : " +
              str(password_postgis) + endC)
        print(cyan + "aspectRatio() : " + endC + "database_postgis : " +
              str(database_postgis) + endC)
        print(cyan + "aspectRatio() : " + endC + "schema_postgis : " +
              str(schema_postgis) + endC)
        print(cyan + "aspectRatio() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "aspectRatio() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "aspectRatio() : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "aspectRatio() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "aspectRatio() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Constantes liées aux données PostGIS et GRASS
    LOCATION = "LOCATION"
    MAPSET = "MAPSET"
    SUFFIX_SEGMENTED = "_segmented"

    if not os.path.exists(grid_output) or overwrite:

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        if os.path.exists(grid_output):
            removeVectorFile(grid_output)

        temp_path = os.path.dirname(grid_output) + os.sep + "AspectRatio"
        file_name = os.path.splitext(os.path.basename(roads_input))[0]
        roads_segmented = temp_path + os.sep + file_name + SUFFIX_SEGMENTED + extension_vector

        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)

        # Variables d'environnement spécifiques à GRASS
        gisbase = os.environ['GISBASE']
        gisdb = "GRASS_database"

        # Variables liées à GRASS permettant la construction de 'LOCATION' et 'MAPSET'
        xmin, xmax, ymin, ymax = getEmpriseFile(roads_input, format_vector)
        pixel_size_x, pixel_size_y = 1, 1

        #####################################
        ### Préparation géodatabase GRASS ###
        #####################################

        print(bold + cyan + "Préparation de la géodatabase GRASS :" + endC)
        timeLine(path_time_log, "    Préparation de la géodatabase GRASS : ")

        # Initialisation de la connexion à la géodatabase GRASS
        gisbase, gisdb, location, mapset = initializeGrass(
            temp_path, xmin, xmax, ymin, ymax, pixel_size_x, pixel_size_y,
            epsg, gisbase, gisdb, LOCATION, MAPSET, True, overwrite)

        ###################################################
        ### Division des routes en segments de x mètres ###
        ###################################################

        print(bold + cyan + "Segmentation des routes avec GRASS :" + endC)
        timeLine(path_time_log, "    Segmentation des routes avec GRASS : ")

        # Segmentation du jeu de données route en segments de x mètres
        splitGrass(roads_input, roads_segmented, seg_dist, format_vector,
                   overwrite)
        cleanGrass(temp_path, gisdb, save_results_intermediate)

        ###########################################
        ### Préparation base de données PostGIS ###
        ###########################################

        print(bold + cyan + "Préparation de la base de données PostGIS :" +
              endC)
        timeLine(path_time_log,
                 "    Préparation de la base de données PostGIS : ")

        # Création de la base de données PostGIS
        # NB: the database is shared with other indicators (Height of Roughness Elements / Terrain Roughness Class), which can cause conflicts
        createDatabase(database_postgis)

        # Import des fichiers shapes maille, bati et routes segmentées dans la base de données PostGIS
        table_name_maille = importVectorByOgr2ogr(database_postgis,
                                                  grid_input,
                                                  'ara_maille',
                                                  user_name=user_postgis,
                                                  password=password_postgis,
                                                  ip_host=server_postgis,
                                                  num_port=str(port_number),
                                                  schema_name=schema_postgis,
                                                  epsg=str(epsg),
                                                  codage=project_encoding)
        table_name_bati = importVectorByOgr2ogr(database_postgis,
                                                built_input,
                                                'ara_bati',
                                                user_name=user_postgis,
                                                password=password_postgis,
                                                ip_host=server_postgis,
                                                num_port=str(port_number),
                                                schema_name=schema_postgis,
                                                epsg=str(epsg),
                                                codage=project_encoding)
        table_name_routes_seg = importVectorByOgr2ogr(
            database_postgis,
            roads_segmented,
            'ara_routes_seg',
            user_name=user_postgis,
            password=password_postgis,
            ip_host=server_postgis,
            num_port=str(port_number),
            schema_name=schema_postgis,
            epsg=str(epsg),
            codage=project_encoding)

        # Connect to the PostGIS database and initialise 'cursor' (used to fetch SQL query results and process them in Python)
        connection = openConnection(database_postgis,
                                    user_postgis,
                                    password_postgis,
                                    server_postgis,
                                    str(port_number),
                                    schema_name=schema_postgis)
        cursor = connection.cursor()

        # Requête d'ajout de champ ID segment route dans la table routes_seg et création des index pour les shapes importés
        query_preparation = """
        ALTER TABLE %s ADD COLUMN id_seg serial;
        --CREATE INDEX IF NOT EXISTS routes_seg_geom_gist ON %s USING GIST (geom);
        --CREATE INDEX IF NOT EXISTS bati_geom_gist ON %s USING GIST (geom);
        --CREATE INDEX IF NOT EXISTS maille_geom_gist ON %s USING GIST (geom);
        """ % (table_name_routes_seg, table_name_routes_seg, table_name_bati,
               table_name_maille)
        if debug >= 2:
            print(query_preparation)
        executeQuery(connection, query_preparation)

        ##############################################################################
        ### Création des segments de y mètres perpendiculaires aux segments routes ###
        ##############################################################################

        print(bold + cyan +
              "Création des segments perpendiculaires aux routes :" + endC)
        timeLine(path_time_log,
                 "    Création des segments perpendiculaires aux routes : ")

        # Début de la construction de la requête de création des segments perpendiculaires
        query_seg_perp = "DROP TABLE IF EXISTS ara_seg_perp;\n"
        query_seg_perp += "CREATE TABLE ara_seg_perp (id_seg text, id_perp text, xR float, yR float, xP float, yP float, geom geometry);\n"
        query_seg_perp += "INSERT INTO ara_seg_perp VALUES\n"

        # Récupération de la liste des identifiants segments routes
        cursor.execute(
            "SELECT id_seg FROM %s GROUP BY id_seg ORDER BY id_seg;" %
            table_name_routes_seg)
        id_seg_list = cursor.fetchall()

        # Boucle sur les segments routes
        nb_seg = len(id_seg_list)
        treat_seg = 1
        for id_seg in id_seg_list:
            if debug >= 4:
                print(bold + "    Traitement du segment route : " + endC +
                      str(treat_seg) + "/" + str(nb_seg))

            id_seg = id_seg[0]

            # Table temporaire ne contenant qu'un segment route donné : ST_LineMerge(geom) permet de passer la géométrie de MultiLineString à LineString, pour éviter des problèmes de requêtes spatiales
            query_temp1_seg = "DROP TABLE IF EXISTS ara_temp1_seg;\n"
            query_temp1_seg += "CREATE TABLE ara_temp1_seg AS SELECT id_seg as id_seg, ST_LineMerge(geom) as geom FROM %s WHERE id_seg = %s;\n" % (
                table_name_routes_seg, id_seg)
            if debug >= 4:
                print(query_temp1_seg)
            executeQuery(connection, query_temp1_seg)

            # Récupération du nombre de sommets du segment route (utile pour les segments routes en courbe, permet de récupérer le dernier point du segment)
            cursor.execute("SELECT ST_NPoints(geom) FROM ara_temp1_seg;")
            nb_points = cursor.fetchone()
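            # NB: fetchone() returns a 1-tuple; the '%' formatting below unpacks it, so '%s' receives the bare point count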

            # Récupération des coordonnées X et Y des points extrémités du segment route
            query_xR1 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp1_seg) as toto;"
            cursor.execute(query_xR1)
            xR1 = cursor.fetchone()
            query_yR1 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp1_seg) as toto;"
            cursor.execute(query_yR1)
            yR1 = cursor.fetchone()
            query_xR2 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,%s)) as geom FROM ara_temp1_seg) as toto;" % (
                nb_points)
            cursor.execute(query_xR2)
            xR2 = cursor.fetchone()
            query_yR2 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,%s)) as geom FROM ara_temp1_seg) as toto;" % (
                nb_points)
            cursor.execute(query_yR2)
            yR2 = cursor.fetchone()

            # Transformation des coordonnées X et Y des points extrémités du segment route en valeurs numériques
            xR1 = float(xR1[0])  # fetchone() returns a 1-tuple
            yR1 = float(yR1[0])
            xR2 = float(xR2[0])
            yR2 = float(yR2[0])
            if debug >= 4:
                print("      xR1 = " + str(xR1))
                print("      yR1 = " + str(yR1))
                print("      xR2 = " + str(xR2))
                print("      yR2 = " + str(yR2))

            # Calcul des delta X et Y entre les points extrémités du segment route
            dxR = xR1 - xR2
            dyR = yR1 - yR2
            if debug >= 4:
                print("      dxR = " + str(dxR))
                print("      dyR = " + str(dyR))
                print("\n")

            # Skip degenerate road segments: a segmentation bug can otherwise create perpendiculars every few centimetres
            dist_R1_R2 = math.hypot(dxR, dyR)
            if dist_R1_R2 >= (seg_dist / 2):

                # Calcul de l'angle (= gisement) entre le Nord et le segment route
                if dxR == 0 or dyR == 0:
                    if dxR == 0 and dyR > 0:
                        aR = 0
                    elif dxR > 0 and dyR == 0:
                        aR = 90
                    elif dxR == 0 and dyR < 0:
                        aR = 180
                    elif dxR < 0 and dyR == 0:
                        aR = 270
                else:
                    aR = math.degrees(math.atan(dxR / dyR))
                    if aR < 0:
                        aR = aR + 360
                if debug >= 4:
                    print("      aR = " + str(aR))

                # Calcul des angles (= gisements) entre le Nord et les 2 segments perpendiculaires au segment route
                aP1 = aR + 90
                if aP1 < 0:
                    aP1 = aP1 + 360
                if aP1 >= 360:
                    aP1 = aP1 - 360
                aP2 = aR - 90
                if aP2 < 0:
                    aP2 = aP2 + 360
                if aP2 >= 360:
                    aP2 = aP2 - 360
                if debug >= 4:
                    print("      aP1 = " + str(aP1))
                    print("      aP2 = " + str(aP2))

                # Calculs des coordonnées des nouveaux points à l'extrémité de chaque segment perpendiculaire pour le segment route sélectionné
                xP1 = xR1 + (seg_length * math.sin(math.radians(aP1)))
                yP1 = yR1 + (seg_length * math.cos(math.radians(aP1)))
                xP2 = xR1 + (seg_length * math.sin(math.radians(aP2)))
                yP2 = yR1 + (seg_length * math.cos(math.radians(aP2)))
                if debug >= 4:
                    print("      xP1 = " + str(xP1))
                    print("      yP1 = " + str(yP1))
                    print("      xP2 = " + str(xP2))
                    print("      yP2 = " + str(yP2))
                    print("\n")

                # Construction de la requête de création des 2 segments perpendiculaires pour le segment route sélectionné
                query_seg_perp += "    ('%s', '%s_perp1', %s, %s, %s, %s, 'LINESTRING(%s %s, %s %s)'),\n" % (
                    str(id_seg), str(id_seg), xR1, yR1, xP1, yP1, xR1, yR1,
                    xP1, yP1)
                query_seg_perp += "    ('%s', '%s_perp2', %s, %s, %s, %s, 'LINESTRING(%s %s, %s %s)'),\n" % (
                    str(id_seg), str(id_seg), xR1, yR1, xP2, yP2, xR1, yR1,
                    xP2, yP2)

            treat_seg += 1

        # Fin de la construction de la requête de création des segments perpendiculaires et exécution de cette requête
        query_seg_perp = query_seg_perp[:-2] + ";\n"  # Transformer la virgule de la dernière ligne SQL en point-virgule (pour terminer la requête)
        query_seg_perp += "ALTER TABLE ara_seg_perp ALTER COLUMN geom TYPE geometry(LINESTRING,%s) USING ST_SetSRID(geom,%s);\n" % (
            epsg, epsg)  # Mise à jour du système de coordonnées
        query_seg_perp += "CREATE INDEX IF NOT EXISTS seg_perp_geom_gist ON ara_seg_perp USING GIST (geom);\n"
        if debug >= 2:
            print(query_seg_perp)
        executeQuery(connection, query_seg_perp)

        ###################################################
        ### Intersect segments perpendiculaires et bâti ###
        ###################################################

        print(
            bold + cyan +
            "Intersect entre les segments perpendiculaires et les bâtiments :"
            + endC)
        timeLine(
            path_time_log,
            "    Intersect entre les segments perpendiculaires et les bâtiments : "
        )

        # Requête d'intersect entre les segments perpendiculaires et les bâtiments
        query_intersect = """
        DROP TABLE IF EXISTS ara_intersect_bati;
        CREATE TABLE ara_intersect_bati AS
            SELECT r.id_seg as id_seg, r.id_perp as id_perp, r.xR as xR, r.yR as yR, b.HAUTEUR as haut_bati, ST_Intersection(r.geom, b.geom) as geom
            FROM ara_seg_perp as r, %s as b
            WHERE ST_Intersects(r.geom, b.geom);
        ALTER TABLE ara_intersect_bati ADD COLUMN id_intersect serial;
        CREATE INDEX IF NOT EXISTS intersect_bati_geom_gist ON ara_intersect_bati USING GIST (geom);
        """ % table_name_bati
        if debug >= 2:
            print(query_intersect)
        executeQuery(connection, query_intersect)

        ##################################################################################################################
        ### Récupération des demi-largeurs de rue et de la hauteur du 1er bâtiment pour chaque segment perpendiculaire ###
        ##################################################################################################################

        print(
            bold + cyan +
            "Récupération des informations nécessaires au calcul du rapport d'aspect :"
            + endC)
        timeLine(
            path_time_log,
            "    Récupération des informations nécessaires au calcul du rapport d'aspect : "
        )

        # Début de la construction de la requête de création des points route, avec infos de demi-largeurs de rue et hauteurs des bâtiments
        query_pt_route = "DROP TABLE IF EXISTS ara_asp_ratio_by_seg;\n"
        query_pt_route += "CREATE TABLE ara_asp_ratio_by_seg (id_seg text, xR float, yR float, width1 float, height1 float, width2 float, height2 float, geom geometry);\n"
        query_pt_route += "INSERT INTO ara_asp_ratio_by_seg VALUES\n"

        # Récupération de la liste des identifiants segments routes (uniquement ceux qui intersectent du bâti)
        cursor.execute(
            "SELECT id_seg FROM ara_intersect_bati GROUP BY id_seg ORDER BY id_seg;"
        )
        id_seg_list = cursor.fetchall()

        # Boucle sur les segments routes
        nb_seg = len(id_seg_list)
        treat_seg = 1
        for id_seg in id_seg_list:
            if debug >= 4:
                print(bold + "    Traitement du segment route : " + endC +
                      str(treat_seg) + "/" + str(nb_seg))

            id_seg = id_seg[0]

            # Table temporaire ne contenant que les intersects d'un segment route donné
            query_temp2_seg = "DROP TABLE IF EXISTS ara_temp2_seg;\n"
            query_temp2_seg += "CREATE TABLE ara_temp2_seg AS SELECT id_seg, id_perp, xR, yR, haut_bati, id_intersect, geom FROM ara_intersect_bati WHERE id_seg = '%s';\n" % (
                id_seg)
            if debug >= 4:
                print(query_temp2_seg)
            executeQuery(connection, query_temp2_seg)

            # Récupération des coordonnées X et Y du point route du segment route associé
            cursor.execute("SELECT xR FROM ara_temp2_seg;")
            xR = cursor.fetchone()
            cursor.execute("SELECT yR FROM ara_temp2_seg;")
            yR = cursor.fetchone()

            # Transformation des coordonnées X et Y du point route du segment route associé en valeurs numériques
            xR = float(xR[0])  # fetchone() returns a 1-tuple
            yR = float(yR[0])
            if debug >= 4:
                print("      xR = " + str(xR))
                print("      yR = " + str(yR))
                print("\n")

            # Initialisation des variables demi-largeurs de rue et hauteur du 1er bâtiment intersecté
            w1 = 0
            h1 = 0
            w2 = 0
            h2 = 0

            # Récupération de la liste des identifiants segments perpendiculaires de la table temp2_seg
            cursor.execute(
                "SELECT id_perp FROM ara_temp2_seg GROUP BY id_perp ORDER BY id_perp;"
            )
            id_perp_list = cursor.fetchall()

            # Boucle sur les perpendiculaires (max 2) d'un segment route donné
            for id_perp in id_perp_list:

                # Récupération du numéro de perpendiculaire (1 ou 2 ~ droite ou gauche)
                num_seg = int(id_perp[0][-1])  # id_perp values look like 'xxx_perp1' / 'xxx_perp2'

                # Initialisation de listes contenant les demi-largeurs de rue et les hauteurs des bâtiments intersectés
                length_list = []
                haut_bati_list = []

                # Table temporaire ne contenant que les intersects d'un segment perpendiculaire donné
                query_temp2_perp = "DROP TABLE IF EXISTS ara_temp2_perp;\n"
                query_temp2_perp += "CREATE TABLE ara_temp2_perp AS SELECT id_seg, id_perp, xR, yR, haut_bati, id_intersect, geom FROM ara_temp2_seg WHERE id_perp = '%s';\n" % (
                    id_perp)
                if debug >= 4:
                    print(query_temp2_perp)
                executeQuery(connection, query_temp2_perp)

                # Récupération de la liste des identifiants segments intersects de la table temp2_perp
                cursor.execute(
                    "SELECT id_intersect FROM ara_temp2_perp GROUP BY id_intersect ORDER BY id_intersect;"
                )
                id_intersect_list = cursor.fetchall()

                # Boucle sur les intersects d'un segment perpendiculaire donné, d'un segment route donné
                for id_intersect in id_intersect_list:

                    # Récupération des coordonnées X et Y des points extrémités de chaque segment intersect
                    query_xI1 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % (
                        id_intersect)
                    cursor.execute(query_xI1)
                    xI1 = cursor.fetchone()
                    query_yI1 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,1)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % (
                        id_intersect)
                    cursor.execute(query_yI1)
                    yI1 = cursor.fetchone()
                    query_xI2 = "SELECT ST_X(geom) as X FROM (SELECT ST_AsText(ST_PointN(geom,2)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % (
                        id_intersect)
                    cursor.execute(query_xI2)
                    xI2 = cursor.fetchone()
                    query_yI2 = "SELECT ST_Y(geom) as Y FROM (SELECT ST_AsText(ST_PointN(geom,2)) as geom FROM ara_temp2_perp WHERE id_intersect = '%s') as toto;" % (
                        id_intersect)
                    cursor.execute(query_yI2)
                    yI2 = cursor.fetchone()

                    # Conversion of the X/Y end-point coordinates of each intersected segment to floats
                    try:
                        xI1 = float(xI1[0])
                        yI1 = float(yI1[0])
                        xI2 = float(xI2[0])
                        yI2 = float(yI2[0])
                    except (TypeError, ValueError):  # NULL coordinates are returned for U-shaped buildings (intersected at more than 2 points)
                        xI1 = yI1 = xI2 = yI2 = 0
                    if debug >= 4:
                        print("              xI1 = " + str(xI1))
                        print("              yI1 = " + str(yI1))
                        print("              xI2 = " + str(xI2))
                        print("              yI2 = " + str(yI2))

                    # Calcul des distances entre la route et chaque point du segment intersect
                    length_intersect1 = math.hypot(xR - xI1, yR - yI1)
                    length_intersect2 = math.hypot(xR - xI2, yR - yI2)
                    if debug >= 4:
                        print("              length_intersect1 = " +
                              str(length_intersect1))
                        print("              length_intersect2 = " +
                              str(length_intersect2))

                    # Récupération de la valeur de distance entre la route et le point d'intersect le plus proche (+ ajout dans la liste length_list)
                    length = min(length_intersect1, length_intersect2)
                    length_list.append(length)
                    if debug >= 4:
                        print("              length = " + str(length))

                    # Récupération de la hauteur du bâtiment correspondant à l'intersect (+ ajout dans la liste haut_bati_list)
                    query_haut_bati = "SELECT haut_bati FROM ara_temp2_perp WHERE id_intersect = '%s';" % (
                        id_intersect)
                    cursor.execute(query_haut_bati)
                    haut_bati = float(cursor.fetchone()[0])  # fetchone() returns a 1-tuple; the value may be a Decimal
                    haut_bati_list.append(haut_bati)
                    if debug >= 4:
                        print("              haut_bati = " + str(haut_bati))
                        print("\n")

                # Pour un segment perpendiculaire donné, récupération de la distance minimale entre la route et les intersect avec le bâti
                width = min(length_list)
                if debug >= 4:
                    print("          width = " + str(width))

                # Pour un segment perpendiculaire donné, récupération de la hauteur du bâtiment correspondant au segment intersect le plus proche de la route
                height_position = length_list.index(width)
                height = haut_bati_list[height_position]
                if debug >= 4:
                    print("          height = " + str(height))
                    print("\n")

                # MAJ des variables demi-largeurs de rue et hauteur du 1er bâtiment intersecté suivant le côté de la perpendiculaire par rapport à la route
                if num_seg == 1:
                    w1 = width
                    h1 = height
                elif num_seg == 2:
                    w2 = width
                    h2 = height

            # Construction de la requête de création du point route pour le segment route donné
            query_pt_route += "    ('%s', %s, %s, %s, %s, %s, %s, 'POINT(%s %s)'),\n" % (
                str(id_seg), xR, yR, w1, h1, w2, h2, xR, yR)

            treat_seg += 1

        # Fin de la construction de la requête de création des points route, avec infos de demi-largeurs de rue et hauteurs des bâtiments
        query_pt_route = query_pt_route[:-2] + ";\n"  # Transformer la virgule de la dernière ligne SQL en point-virgule (pour terminer la requête)
        query_pt_route += "ALTER TABLE ara_asp_ratio_by_seg ALTER COLUMN geom TYPE geometry(POINT,%s) USING ST_SetSRID(geom,%s);\n" % (
            epsg, epsg)  # Mise à jour du système de coordonnées
        query_pt_route += "CREATE INDEX IF NOT EXISTS asp_ratio_by_seg_geom_gist ON ara_asp_ratio_by_seg USING GIST (geom);\n"
        if debug >= 2:
            print(query_pt_route)
        executeQuery(connection, query_pt_route)

        ###########################################################
        ### Calcul de l'indicateur et export de la table finale ###
        ###########################################################

        print(bold + cyan +
              "Calculs finaux de l'indicateur de rapport d'aspect :" + endC)
        timeLine(path_time_log,
                 "    Calculs finaux de l'indicateur de rapport d'aspect : ")

        # Requête de calcul d'un rapport d'aspect par segment route
        query_asp_ratio_by_seg = """
        ALTER TABLE ara_asp_ratio_by_seg ADD COLUMN asp_ratio float;
        UPDATE ara_asp_ratio_by_seg SET asp_ratio = 0;
        UPDATE ara_asp_ratio_by_seg SET asp_ratio = ((height1 + height2) / 2) / (width1 + width2) WHERE height1 <> 0 AND height2 <> 0 AND width1 <> 0 AND width2 <> 0;
        """
        if debug >= 2:
            print(query_asp_ratio_by_seg)
        executeQuery(connection, query_asp_ratio_by_seg)
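        # Worked example (illustrative values): with height1 = 12 m, height2 = 9 m
        # and half-widths width1 = 5.5 m, width2 = 6.0 m, the aspect ratio is
        # ((12 + 9) / 2) / (5.5 + 6.0) = 10.5 / 11.5 ≈ 0.91.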

        # Query buffering the grid file
        query_buffer = """
        DROP TABLE IF EXISTS ara_buffer;
        CREATE TABLE ara_buffer AS
            SELECT ID as ID, ST_Buffer(geom, %s) as geom
            FROM %s;
        CREATE INDEX IF NOT EXISTS buffer_geom_gist ON ara_buffer USING GIST (geom);
        """ % (buffer_size, table_name_maille)
        if debug >= 2:
            print(query_buffer)
        executeQuery(connection, query_buffer)
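        # Buffering by buffer_size lets road points lying up to buffer_size outside
        # a grid cell still be assigned to that cell in the intersection below.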

        # Query computing an aspect ratio per grid cell (via an intersection with the buffered grid)
        query_asp_ratio_temp1 = """
        DROP TABLE IF EXISTS ara_asp_ratio_temp1;
        CREATE TABLE ara_asp_ratio_temp1 AS
            SELECT m.ID as ID, avg(r.asp_ratio) as asp_ratio, m.geom as geom
            FROM %s as m, ara_buffer as b, ara_asp_ratio_by_seg as r
            WHERE ST_Intersects(b.geom, r.geom)
                AND m.ID = b.ID
                AND r.asp_ratio > 0
            GROUP BY m.ID, m.geom;
        CREATE INDEX IF NOT EXISTS asp_ratio_temp1_geom_gist ON ara_asp_ratio_temp1 USING GIST (geom);
        """ % (table_name_maille)
        if debug >= 2:
            print(query_asp_ratio_temp1)
        executeQuery(connection, query_asp_ratio_temp1)

        # Aspect ratio for grid cells that do not intersect any road point carrying an aspect ratio
        query_asp_ratio_temp2 = """
        DROP TABLE IF EXISTS ara_asp_ratio_temp2;
        CREATE TABLE ara_asp_ratio_temp2 AS
            SELECT DISTINCT ID as ID, geom as geom
            FROM %s
            WHERE ID NOT IN
                (SELECT DISTINCT ID
                FROM ara_asp_ratio_temp1);
        ALTER TABLE ara_asp_ratio_temp2 ADD COLUMN asp_ratio float;
        UPDATE ara_asp_ratio_temp2 SET asp_ratio = 0;
        CREATE INDEX IF NOT EXISTS asp_ratio_temp2_geom_gist ON ara_asp_ratio_temp2 USING GIST (geom);
        """ % table_name_maille
        if debug >= 2:
            print(query_asp_ratio_temp2)
        executeQuery(connection, query_asp_ratio_temp2)
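        # Cells without any qualifying road point get asp_ratio = 0, so the UNION
        # below restores the full original grid, with 0 flagging "no data".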

        # Merge the two previous tables to recover the full set of original grid cells
        query_asp_ratio = """
        DROP TABLE IF EXISTS ara_asp_ratio;
        CREATE TABLE ara_asp_ratio AS
            SELECT ID, asp_ratio, geom
            FROM ara_asp_ratio_temp1
            UNION
            SELECT ID, asp_ratio, geom
            FROM ara_asp_ratio_temp2;
        ALTER TABLE ara_asp_ratio ALTER COLUMN ID TYPE INTEGER;
        """
        if debug >= 2:
            print(query_asp_ratio)
        executeQuery(connection, query_asp_ratio)
        closeConnection(connection)
        exportVectorByOgr2ogr(database_postgis,
                              grid_output,
                              'ara_asp_ratio',
                              user_name=user_postgis,
                              password=password_postgis,
                              ip_host=server_postgis,
                              num_port=str(port_number),
                              schema_name=schema_postgis,
                              format_type=format_vector)

        ###############################
        ### Temporary files cleanup ###
        ###############################

        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            # ~ dropDatabase(database_postgis) # Conflicts with other indicators (Height of Roughness Elements / Terrain Roughness Class)

    else:
        print(bold + magenta + "The Aspect Ratio computation has already been done." +
              endC)

    print(bold + yellow + "End of the Aspect Ratio indicator computation." +
          endC + "\n")
    timeLine(path_time_log, "End of the Aspect Ratio indicator computation: ")

    return
Exemplo n.º 30
0
def createMnh(image_mns_input, image_mnt_input, image_threshold_input, vector_emprise_input, image_mnh_output, automatic, bd_road_vector_input_list, bd_road_buff_list, sql_road_expression_list, bd_build_vector_input_list, height_bias, threshold_bd_value, threshold_delta_h, mode_interpolation, method_interpolation, interpolation_bco_radius, simplify_vector_param, epsg, no_data_value, ram_otb, path_time_log, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "createMnh() : MNH creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : MNH CREATION" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green + "createMnh() : Variables dans la fonction" + endC)
        print(cyan + "createMnh() : " + endC + "image_mns_input : " + str(image_mns_input) + endC)
        print(cyan + "createMnh() : " + endC + "image_mnt_input : " + str(image_mnt_input) + endC)
        print(cyan + "createMnh() : " + endC + "image_threshold_input : " + str(image_threshold_input) + endC)
        print(cyan + "createMnh() : " + endC + "vector_emprise_input : " + str(vector_emprise_input) + endC)
        print(cyan + "createMnh() : " + endC + "image_mnh_output : " + str(image_mnh_output) + endC)
        print(cyan + "createMnh() : " + endC + "automatic : " + str(automatic) + endC)
        print(cyan + "createMnh() : " + endC + "bd_road_vector_input_list : " + str(bd_road_vector_input_list) + endC)
        print(cyan + "createMnh() : " + endC + "bd_road_buff_list : " + str(bd_road_buff_list) + endC)
        print(cyan + "createMnh() : " + endC + "sql_road_expression_list : " + str(sql_road_expression_list) + endC)
        print(cyan + "createMnh() : " + endC + "bd_build_vector_input_list : " + str(bd_build_vector_input_list) + endC)
        print(cyan + "createMnh() : " + endC + "height_bias : " + str(height_bias) + endC)
        print(cyan + "createMnh() : " + endC + "threshold_bd_value : " + str(threshold_bd_value) + endC)
        print(cyan + "createMnh() : " + endC + "threshold_delta_h : " + str(threshold_delta_h) + endC)
        print(cyan + "createMnh() : " + endC + "mode_interpolation : " + str(mode_interpolation) + endC)
        print(cyan + "createMnh() : " + endC + "method_interpolation : " + str(method_interpolation) + endC)
        print(cyan + "createMnh() : " + endC + "interpolation_bco_radius : " + str(interpolation_bco_radius) + endC)
        print(cyan + "createMnh() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC)
        print(cyan + "createMnh() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "createMnh() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "createMnh() : " + endC + "ram_otb : " + str(ram_otb) + endC)
        print(cyan + "createMnh() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMnh() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "createMnh() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "createMnh() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "createMnh() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createMnh() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMnh() : " + endC + "overwrite : " + str(overwrite) + endC)

    # CONSTANTS
    PRECISION = 0.0000001
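    # PRECISION is used as a near-zero placeholder: masked or non-positive MNH
    # pixels are set to this value instead of 0 in the BandMath expressions below.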

    CODAGE_8B = "uint8"
    CODAGE_F = "float"

    SUFFIX_CUT = "_cut"
    SUFFIX_CLEAN = "_clean"
    SUFFIX_SAMPLE = "_sample"
    SUFFIX_MASK = "_mask"
    SUFFIX_TMP = "_tmp"
    SUFFIX_MNS = "_mns"
    SUFFIX_MNT = "_mnt"
    SUFFIX_ROAD = "_road"
    SUFFIX_BUILD = "_build"
    SUFFIX_RASTER = "_raster"
    SUFFIX_VECTOR = "_vector"

    # DEFINE TEMPORARY DIRECTORIES AND FILES
    repertory_output = os.path.dirname(image_mnh_output)
    basename_mnh = os.path.splitext(os.path.basename(image_mnh_output))[0]

    sub_repertory_raster_temp = repertory_output + os.sep + basename_mnh + SUFFIX_RASTER + SUFFIX_TMP
    sub_repertory_vector_temp = repertory_output + os.sep + basename_mnh + SUFFIX_VECTOR + SUFFIX_TMP
    cleanTempData(sub_repertory_raster_temp)
    cleanTempData(sub_repertory_vector_temp)

    basename_vector_emprise = os.path.splitext(os.path.basename(vector_emprise_input))[0]
    basename_mns_input = os.path.splitext(os.path.basename(image_mns_input))[0]
    basename_mnt_input = os.path.splitext(os.path.basename(image_mnt_input))[0]

    image_mnh_tmp = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_TMP + extension_raster
    image_mnh_road = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_ROAD + extension_raster

    vector_bd_bati_temp = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_BUILD + SUFFIX_TMP + extension_vector
    vector_bd_bati = repertory_output + os.sep + basename_mnh + SUFFIX_BUILD + extension_vector
    raster_bd_bati = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_BUILD + extension_raster
    removeVectorFile(vector_bd_bati)

    image_emprise_mnt_mask = sub_repertory_raster_temp + os.sep + basename_vector_emprise + SUFFIX_MNT + extension_raster
    image_mnt_cut = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CUT + extension_raster
    image_mnt_clean = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CLEAN + extension_raster
    image_mnt_clean_sample = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CLEAN + SUFFIX_SAMPLE + extension_raster
    image_emprise_mns_mask = sub_repertory_raster_temp + os.sep + basename_vector_emprise + SUFFIX_MNS + extension_raster
    image_mns_cut = sub_repertory_raster_temp + os.sep + basename_mns_input + SUFFIX_CUT + extension_raster
    image_mns_clean = sub_repertory_raster_temp + os.sep + basename_mns_input + SUFFIX_CLEAN + extension_raster

    vector_bd_road_temp = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_ROAD + SUFFIX_TMP + extension_vector
    raster_bd_road_mask = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_ROAD + SUFFIX_MASK + extension_raster

    if image_threshold_input != "" :
        basename_threshold_input = os.path.splitext(os.path.basename(image_threshold_input))[0]
        image_threshold_cut = sub_repertory_raster_temp + os.sep + basename_threshold_input + SUFFIX_CUT + extension_raster
        image_threshold_mask = sub_repertory_raster_temp + os.sep + basename_threshold_input + SUFFIX_MASK + extension_raster

    # CHECK WHETHER THE OUTPUT FILE ALREADY EXISTS
    # If an output file with the same name already exists and the overwrite option is false, do nothing
    check = os.path.isfile(image_mnh_output)
    if check and not overwrite:
        print(bold + yellow + "createMnh() : " + endC + "Create mnh %s from %s and %s already done : no update" % (image_mnh_output, image_mns_input, image_mnt_input) + endC)
    # Otherwise, or if overwrite is enabled, compute it
    else:
        if check:
            try: # Remove any existing file
                removeFile(image_mnh_output)
            except Exception:
                pass # If the file cannot be removed, assume it does not exist and continue

        # CUT THE INPUT MNS AND MNT FILES TO THE FOOTPRINT
        if debug >= 3:
            print(bold + green + "createMnh() : " + endC + "Cutting files %s and %s to the footprint" %(image_mns_input, image_mnt_input) + endC)

        # Cut the MNS
        if not cutImageByVector(vector_emprise_input, image_mns_input, image_mns_cut, None, None, no_data_value, epsg, format_raster, format_vector) :
            raise NameError(cyan + "createMnh() : " + bold + red + "!!! An error occurred while cutting image: " + image_mns_input + ". See error message above." + endC)

        # Cut the MNT
        if not cutImageByVector(vector_emprise_input, image_mnt_input, image_mnt_cut, None, None, no_data_value, epsg, format_raster, format_vector) :
            raise NameError(cyan + "createMnh() : " + bold + red + "!!! An error occurred while cutting image: " + image_mnt_input + ". See error message above." + endC)

        if debug >= 3:
            print(bold + green + "createMnh() : " + endC + "Cutting of files %s and %s completed" %(image_mns_cut, image_mnt_cut) + endC)


        # FILL HOLES IN THE INPUT MNT IF NEEDED

        nodata_mnt = getNodataValueImage(image_mnt_cut)
        pixelNodataCount = countPixelsOfValue(image_mnt_cut, nodata_mnt)

        if pixelNodataCount > 0 :

            if debug >= 3:
                print(bold + green + "createMnh() : " + endC + "Filling MNT holes for %s" %(image_mnt_cut) + endC)

            # Rasterise the footprint vector to create a mask for filling the MNT holes
            rasterizeBinaryVector(vector_emprise_input, image_mnt_cut, image_emprise_mnt_mask, 1, CODAGE_8B)

            # Use SAGA to fill the holes
            fillNodata(image_mnt_cut, image_emprise_mnt_mask, image_mnt_clean, save_results_intermediate)

            if debug >= 3:
                print(bold + green + "createMnh() : " + endC + "MNT hole filling to %s completed" %(image_mnt_clean) + endC)

        else :
            image_mnt_clean = image_mnt_cut
            if debug >= 3:
                print(bold + green + "\ncreateMnh() : " + endC + "MNT hole filling not necessary for %s" %(image_mnt_cut) + endC)


        # FILL HOLES IN THE INPUT MNS IF NEEDED

        nodata_mns = getNodataValueImage(image_mns_cut)
        pixelNodataCount = countPixelsOfValue(image_mns_cut, nodata_mns)

        if pixelNodataCount > 0 :

            if debug >= 3:
                print(bold + green + "createMnh() : " + endC + "Filling MNS holes for %s" %(image_mns_cut) + endC)

            # Rasterise the footprint vector to create a mask for filling the MNS holes
            rasterizeBinaryVector(vector_emprise_input, image_mns_cut, image_emprise_mns_mask, 1, CODAGE_8B)

            # Use SAGA to fill the holes
            fillNodata(image_mns_cut, image_emprise_mns_mask, image_mns_clean, save_results_intermediate)

            if debug >= 3:
                print(bold + green + "\ncreateMnh() : " + endC + "MNS hole filling to %s completed" %(image_mns_clean) + endC)

        else :
            image_mns_clean = image_mns_cut
            if debug >= 3:
                print(bold + green + "createMnh() : " + endC + "MNS hole filling not necessary for %s" %(image_mns_cut) + endC)

        # ALIGN THE MNT FILE TO THE MNS FILE GRID

        # Command to set up the geometry and resampling
        command = "otbcli_Superimpose -inr " + image_mns_clean + " -inm " + image_mnt_clean + " -mode " + mode_interpolation + " -interpolator " + method_interpolation + " -out " + image_mnt_clean_sample

        if method_interpolation.lower() == 'bco' :
            command += " -interpolator.bco.radius " + str(interpolation_bco_radius)
        if ram_otb > 0:
            command += " -ram %d" %(ram_otb)

        if debug >= 3:
            print(cyan + "createMnh() : " + bold + green + "Réechantillonage du fichier %s par rapport à la reference %s" %(image_mnt_clean, image_mns_clean) + endC)
            print(command)

        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            raise NameError(cyan + "createMnh() : " + bold + red + "!!! An error occurred during the superimpose of image: " + image_mnt_input + ". See error message above." + endC)

        # BURN ROAD VECTOR DATA INTO THE MNH

        if debug >= 3:
            print(bold + green +  "createMnh() : " + endC + "Use BD road to clean MNH"  + endC)

        # Create a mask filtering the road data (for instance from an NDVI image)
        if image_threshold_input != "" :
            if not cutImageByVector(vector_emprise_input, image_threshold_input, image_threshold_cut, None, None, no_data_value, epsg, format_raster, format_vector) :
                raise NameError(cyan + "createMnh() : " + bold + red + "!!! An error occurred while cutting image: " + image_threshold_input + ". See error message above." + endC)
            createBinaryMask(image_threshold_cut, image_threshold_mask, threshold_bd_value, False, CODAGE_8B)

        # Run the createMacroSamples function for an image matching the road data
        if bd_road_vector_input_list != [] :
            createMacroSamples(image_mns_clean, vector_emprise_input, vector_bd_road_temp, raster_bd_road_mask, bd_road_vector_input_list, bd_road_buff_list, sql_road_expression_list, path_time_log, basename_mnh, simplify_vector_param, format_vector, extension_vector, save_results_intermediate, overwrite)

        if debug >= 3:
            print(bold + green + "\ncreateMnh() : " + endC + "Raster file from the road DB created: %s" %(raster_bd_road_mask) + endC)

        # COMPUTE THE MNH

        # BandMath computation of the MNH: define the expression that subtracts the MNT from the MNS, applies the height bias, and replaces values <= 0 with the near-zero value PRECISION
        delta = ""
        if height_bias > 0 :
            delta = "+%s" %(str(height_bias))
        elif height_bias < 0 :
            delta = "-%s" %(str(abs(height_bias)))
        else :
            delta = ""

        # Define the expression
        if bd_road_vector_input_list != [] :
            if image_threshold_input != "" :
                expression = "\"im3b1 > 0 and im4b1 > 0?%s:(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(str(PRECISION), delta, delta, str(PRECISION))
                command = "otbcli_BandMath -il %s %s %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, raster_bd_road_mask, image_threshold_mask, image_mnh_tmp, CODAGE_F, expression)
            else :
                expression = "\"im3b1 > 0?%s:(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(str(PRECISION), delta, delta, str(PRECISION))
                command = "otbcli_BandMath -il %s %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, raster_bd_road_mask, image_mnh_tmp, CODAGE_F, expression)
        else :
            expression = "\"(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(delta, delta, str(PRECISION))
            command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, image_mnh_tmp, CODAGE_F, expression)

        if ram_otb > 0:
            command += " -ram %d" %(ram_otb)

        if debug >= 3:
            print(cyan + "createMnh() : " + bold + green + "Calcul du MNH  %s difference du MNS : %s par le MNT :%s" %(image_mnh_tmp, image_mns_clean, image_mnt_clean_sample) + endC)
            print(command)

        exitCode = os.system(command)
        if exitCode != 0:
            print(command)
            raise NameError(cyan + "createMnh() : " + bold + red + "An error occurred during otbcli_BandMath command to compute MNH " + image_mnh_tmp + ". See error message above." + endC)

        # CUT THE MNH

        if bd_build_vector_input_list == []:
            image_mnh_road = image_mnh_output

        if debug >= 3:
            print(bold + green + "createMnh() : " + endC + "Cutting the mnh file %s to the footprint" %(image_mnh_tmp) + endC)

        # Cut the MNH
        if not cutImageByVector(vector_emprise_input, image_mnh_tmp, image_mnh_road, None, None, no_data_value, epsg, format_raster, format_vector) :
            raise NameError(cyan + "createMnh() : " + bold + red + "!!! An error occurred while cutting image: " + image_mnh_tmp + ". See error message above." + endC)

        if debug >= 3:
            print(bold + green + "createMnh() : " + endC + "Cutting of the mnh file %s completed" %(image_mnh_road) + endC)

        # BURN BUILDING VECTOR DATA INTO THE MNH

        # If requested => a list of building vector files was passed as input
        if bd_build_vector_input_list != []:

            # Cut the exogenous building DB vectors to the footprint
            vectors_build_cut_list = []
            for vector_build_input in bd_build_vector_input_list :
                vector_name = os.path.splitext(os.path.basename(vector_build_input))[0]
                vector_build_cut = sub_repertory_vector_temp + os.sep + vector_name + SUFFIX_CUT + extension_vector
                vectors_build_cut_list.append(vector_build_cut)
            cutoutVectors(vector_emprise_input, bd_build_vector_input_list, vectors_build_cut_list, format_vector)

            # Merge the cut building vectors
            fusionVectors(vectors_build_cut_list, vector_bd_bati_temp)

            # Vector/raster cross analysis between the merged building vector and the MNH created above
            statisticsVectorRaster(image_mnh_road, vector_bd_bati_temp, "", 1, False, False, True, ['PREC_PLANI','PREC_ALTI','ORIGIN_BAT','median','sum','std','unique','range'], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite)
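            # statisticsVectorRaster is expected to append zonal statistics of the
            # MNH under each building, including the "mean" field read back below.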

            # Compute the delta_H column between the building heights and the mean MNH height under each building
            COLUMN_ID = "ID"
            COLUMN_H_BUILD = "HAUTEUR"
            COLUMN_H_BUILD_MIN = "Z_MIN"
            COLUMN_H_BUILD_MAX = "Z_MAX"
            COLUMN_H_MNH = "mean"
            COLUMN_H_MNH_MIN = "min"
            COLUMN_H_MNH_MAX = "max"
            COLUMN_H_DIFF = "H_diff"

            field_type = ogr.OFTReal
            field_value = 0.0
            field_width = 20
            field_precision = 2
            attribute_name_dico = {}
            attribute_name_dico[COLUMN_ID] = ogr.OFTString
            attribute_name_dico[COLUMN_H_BUILD] = ogr.OFTReal
            attribute_name_dico[COLUMN_H_MNH] = ogr.OFTReal

            # Add the new H_diff column
            addNewFieldVector(vector_bd_bati_temp, COLUMN_H_DIFF, field_type, field_value, field_width, field_precision, format_vector)

            # Get the building height and MNH height values from the vector
            data_z_dico = getAttributeValues(vector_bd_bati_temp, None, None, attribute_name_dico, format_vector)

            # Compute the difference between building height and MNH height
            field_new_values_dico = {}
            for index in range(len(data_z_dico[COLUMN_ID])) :
                index_polygon = data_z_dico[COLUMN_ID][index]
                delta_h = abs(data_z_dico[COLUMN_H_BUILD][index] - data_z_dico[COLUMN_H_MNH][index])
                field_new_values_dico[index_polygon] = {COLUMN_H_DIFF:delta_h}

            # Update the H_diff column in the vector
            setAttributeIndexValuesList(vector_bd_bati_temp, COLUMN_ID, field_new_values_dico, format_vector)

            # Remove every building polygon whose delta H value is below threshold_delta_h
            column = "'%s, %s, %s, %s, %s, %s, %s, %s'"% (COLUMN_ID, COLUMN_H_BUILD, COLUMN_H_BUILD_MIN, COLUMN_H_BUILD_MAX, COLUMN_H_MNH, COLUMN_H_MNH_MIN, COLUMN_H_MNH_MAX, COLUMN_H_DIFF)
            expression = "%s > %s" % (COLUMN_H_DIFF, threshold_delta_h)
            filterSelectDataVector(vector_bd_bati_temp, vector_bd_bati, column, expression, overwrite, format_vector)

            # Warning!!! PAUSE to manually sort and check which building polygons are already present in the MNH
            if not automatic :
                print(bold + blue + "Application MnhCreation => " + endC + "Manual check of the building vector %s, to keep only buildings not already present in the current MNH %s" %(vector_bd_bati_temp, image_mnh_road) + endC)
                input(bold + red + "Press Enter to continue the program..." + endC)

            # Create the building mask with the building heights as H
            rasterizeVector(vector_bd_bati, raster_bd_bati, image_mnh_road, COLUMN_H_BUILD)

            # Merge the building mask with the temporary MNH
            expression = "\"im1b1 > 0.0?im1b1:im2b1\""
            command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" %(raster_bd_bati, image_mnh_road, image_mnh_output, CODAGE_F, expression)

            if ram_otb > 0:
                command += " -ram %d" %(ram_otb)

            if debug >= 3:
                print(cyan + "createMnh() : " + bold + green + "Amelioration du MNH  %s ajout des hauteurs des batis %s" %(image_mnh_road, raster_bd_bati) + endC)
                print(command)

            exitCode = os.system(command)
            if exitCode != 0:
                print(command)
                raise NameError(cyan + "createMnh() : " + bold + red + "An error occurred during otbcli_BandMath command to compute final MNH " + image_mnh_output + ". See error message above." + endC)

    # REMOVE UNNEEDED INTERMEDIATE FILES

    # Remove intermediate files
    if not save_results_intermediate :
        if bd_build_vector_input_list != []:
            removeFile(image_mnh_road)
        if image_threshold_input != "" : # These files only exist when a threshold image was provided
            removeFile(image_threshold_cut)
            removeFile(image_threshold_mask)
        removeFile(raster_bd_bati)
        removeVectorFile(vector_bd_road_temp)
        removeVectorFile(vector_bd_bati_temp)
        removeVectorFile(vector_bd_bati) # To be confirmed!!!
        removeFile(raster_bd_road_mask)
        removeFile(image_mnh_tmp)
        deleteDir(sub_repertory_raster_temp)
        deleteDir(sub_repertory_vector_temp)

    print(endC)
    print(bold + green + "## END : MNH CREATION" + endC)
    print(endC)

    # Update the log
    ending_event = "createMnh() : MNH creation ending : "
    timeLine(path_time_log, ending_event)

    return