def binaryMaskVect(input_image,
                   output_dir,
                   threshold,
                   input_cut_vector,
                   attributes_list,
                   no_data_value,
                   epsg,
                   format_raster="GTiff",
                   format_vector="ESRI Shapefile",
                   extension_raster=".tif",
                   extension_vector=".shp",
                   save_results_intermediate=False,
                   overwrite=True):
    """
    Build a binary mask from *input_image* by thresholding, optionally clip
    it with *input_cut_vector*, then vectorize the mask and add the fields
    described by *attributes_list* to the resulting layer.

    :param input_image: path to the input raster image.
    :param output_dir: output directory (created if it does not exist).
    :param threshold: binarization threshold.
    :param input_cut_vector: clipping vector path; empty string skips clipping.
    :param attributes_list: objects exposing name / ogrType / value / width,
        each added as a field of the output vector.
    :param no_data_value: nodata value used for the clipping step.
    :param epsg: EPSG code used for the clipping step.
    :param format_raster: GDAL raster driver name (default "GTiff").
    :param format_vector: OGR vector driver name (default "ESRI Shapefile").
    :param extension_raster: raster file extension (default ".tif").
    :param extension_vector: vector file extension (default ".shp").
    :param save_results_intermediate: keep the intermediate rasters if True.
    :param overwrite: if False and the output vector exists, return it as is.
    :return: path of the binary mask vector file.
    """

    # Display of the parameters
    if debug >= 3:
        print(bold + green +
              "Variables dans le binaryMaskVect - Variables générales" + endC)
        print(cyan + "binaryMaskVect() : " + endC + "input_image : " +
              str(input_image) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "output_dir : " +
              str(output_dir) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "threshold : " +
              str(threshold) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "input_cut_vector : " +
              str(input_cut_vector) + endC)
        # Fix: no_data_value and epsg were missing from the debug dump
        print(cyan + "binaryMaskVect() : " + endC + "no_data_value : " +
              str(no_data_value) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "format_raster : " +
              str(format_raster) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "binaryMaskVect() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "binaryMaskVect() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "binaryMaskVect() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Output file names carry the threshold value ('.' replaced by '_')
    image_name = os.path.splitext(os.path.basename(input_image))[0]
    threshold_str = str(threshold).replace('.', '_')
    binary_mask = output_dir + os.sep + "bin_mask_" + image_name + "_" + threshold_str + extension_raster
    binary_mask_decoup = output_dir + os.sep + "bin_mask_decoup_" + image_name + "_" + threshold_str + extension_raster
    binary_mask_vector = output_dir + os.sep + "bin_mask_vect_" + image_name + "_" + threshold_str + extension_vector

    # Create the output directory if it does not already exist
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Remove stale temporary files from a previous run
    if os.path.exists(binary_mask):
        removeFile(binary_mask)

    if os.path.exists(binary_mask_decoup):
        removeFile(binary_mask_decoup)

    if os.path.exists(binary_mask_vector):
        if overwrite:
            removeVectorFile(binary_mask_vector, format_vector)
        else:
            # Existing result is kept when overwriting is disabled
            return binary_mask_vector

    # Binary mask creation
    createBinaryMask(input_image, binary_mask, threshold, False)

    if input_cut_vector != "":
        # Clip the raster with the input vector
        cutImageByVector(input_cut_vector, binary_mask, binary_mask_decoup,
                         None, None, no_data_value, epsg, format_raster,
                         format_vector)
    else:
        # No clipping requested: the "clipped" mask aliases the raw mask
        binary_mask_decoup = binary_mask

    # Vectorize the (clipped) binary mask
    polygonizeRaster(binary_mask_decoup, binary_mask_vector, image_name, "id",
                     format_vector)

    # Add the requested fields to the created vector file
    for attribute in attributes_list:
        addNewFieldVector(binary_mask_vector, attribute.name,
                          attribute.ogrType, attribute.value, attribute.width,
                          None, format_vector)

    # Remove useless intermediate files
    if not save_results_intermediate:
        removeFile(binary_mask)
        # Fix: when no clipping was done both names point to the same file;
        # do not remove it a second time.
        if binary_mask_decoup != binary_mask:
            removeFile(binary_mask_decoup)

    return binary_mask_vector
def preparationVecteurs(urbanatlas_input, ucz_output, emprise_file, mask_file,
                        enter_with_mask, image_file, mnh_file,
                        built_files_list, hydrography_file, roads_files_list,
                        rpg_file, indicators_method, ucz_method, dbms_choice,
                        threshold_ndvi, threshold_ndvi_water, threshold_ndwi2,
                        threshold_bi_bottom, threshold_bi_top, path_time_log,
                        temp_directory, format_vector, extension_vector):
    """
    Prepare the input vector datasets (Urban Atlas grid, BD TOPO buildings,
    hydrography, RPG) for the UCZ indicators computation: reprojection to
    Lambert-93 (EPSG:2154), clipping/selection on the study area, SQL
    filtering and addition of an incremented 'ID' field.

    Only the datasets required by *indicators_method* are processed:
      - always: the Urban Atlas grid;
      - "BD_exogenes", "SI_seuillage", "SI_classif": the building layers;
      - "BD_exogenes" only: the hydrography and RPG layers;
      - "SI_seuillage": the roads processing is not implemented and the
        script stops.

    :param urbanatlas_input: input Urban Atlas vector file.
    :param emprise_file: study-area extent vector file.
    :param built_files_list: list of BD TOPO building vector files.
    :param hydrography_file: BD TOPO hydrography vector file.
    :param roads_files_list: list of BD TOPO road vector files.
    :param rpg_file: RPG (agricultural parcels) vector file.
    :param indicators_method: indicators computation method name.
    :param path_time_log: log file receiving the timing steps.
    :param temp_directory: directory for intermediate files.
    :param format_vector: OGR vector format of the files.
    :param extension_vector: vector file extension.
    :raises NameError: if an SQL filtering expression fails.

    The remaining parameters are forwarded by the caller but unused here.
    """

    print(bold + yellow + "Début de la préparation des données vecteurs." +
          endC)
    step = "    Début de la préparation des données vecteurs : "
    timeLine(path_time_log, step)

    field_name = 'ID'
    field_type = ogr.OFTInteger

    # Erode the study area for clipping the other vectors (useful for
    # CrossingVectorRaster, where the vector must be strictly inside the
    # raster). Default buffer: -10 m.
    emprise_erosion = temp_directory + os.sep + os.path.splitext(
        os.path.basename(emprise_file))[0] + "_eroded" + extension_vector
    print(bold + cyan +
          "    Érosion de '%s' pour le découpage des autres vecteurs :" %
          (emprise_file) + endC)
    bufferVector(emprise_file, emprise_erosion, -10, "", 1.0, 10,
                 format_vector)

    # Urban Atlas processing
    print(bold + cyan + "    Traitements du fichier Urban Atlas '%s' :" %
          (urbanatlas_input) + endC)
    basename_grid = os.path.splitext(os.path.basename(urbanatlas_input))[0]
    grid_reproject = temp_directory + os.sep + basename_grid + "_reproject" + extension_vector
    grid_ready = temp_directory + os.sep + basename_grid + "_cut" + extension_vector
    grid_ready_cleaned = temp_directory + os.sep + basename_grid + "_cut_cleaned" + extension_vector
    column = "'%s, CODE2012, ITEM2012'" % (field_name)
    expression = "CODE2012 NOT IN ('12210', '12220', '12230', '50000')"
    # Reprojection to Lambert-93
    updateProjection(urbanatlas_input, grid_reproject, 2154, format_vector)
    # Add an 'ID' field and fill it with incremented values
    addNewFieldVector(grid_reproject, field_name, field_type, 0, None, None,
                      format_vector)
    updateIndexVector(grid_reproject, index_name=field_name)
    # Clip the input Urban Atlas file to the eroded study area
    cutVector(emprise_erosion, grid_reproject, grid_ready, format_vector)
    # Remove water and road polygons (only for the indicators computation)
    ret = filterSelectDataVector(grid_ready, grid_ready_cleaned, column,
                                 expression, format_vector)
    if not ret:
        raise NameError(
            cyan + "preparationVecteurs : " + bold + red +
            "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
            % (expression) + endC)

    if indicators_method in ("BD_exogenes", "SI_seuillage", "SI_classif"):
        # BD TOPO building layers processing
        print(bold + cyan + "    Traitements des fichiers bâti '%s' :" %
              str(built_files_list) + endC)
        built_merge = temp_directory + os.sep + "bati_merged" + extension_vector
        built_ready = temp_directory + os.sep + "bati" + extension_vector
        column = "HAUTEUR"
        expression = "HAUTEUR > 0"
        built_intersect_list = []
        for built_input in built_files_list:
            basename = os.path.splitext(os.path.basename(built_input))[0]
            built_reproject = temp_directory + os.sep + basename + "_reproject" + extension_vector
            built_intersect = temp_directory + os.sep + basename + "_intersect" + extension_vector
            # Reprojection to Lambert-93
            updateProjection(built_input, built_reproject, 2154,
                             format_vector)
            # Select the building features inside the study area
            intersectVector(emprise_file, built_reproject, built_intersect,
                            format_vector)
            built_intersect_list.append(built_intersect)
        # Merge the BD TOPO building layers
        fusionVectors(built_intersect_list, built_merge, format_vector)
        # Remove polygons whose building height is 0.
        # Fix: format_vector was not forwarded here, unlike every other
        # filterSelectDataVector call, so a non-default vector format was
        # silently ignored for the building layer.
        ret = filterSelectDataVector(built_merge, built_ready, column,
                                     expression, format_vector)
        if not ret:
            raise NameError(
                cyan + "preparationVecteurs : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)

        # Add and fill the 'ID' field
        addNewFieldVector(built_ready, field_name, field_type, 0, None, None,
                          format_vector)
        updateIndexVector(built_ready, index_name=field_name)

        if indicators_method == "BD_exogenes":
            # BD TOPO hydrography layer processing
            print(bold + cyan +
                  "    Traitements du fichier hydrographie '%s' :" %
                  (hydrography_file) + endC)
            basename_hydrography = os.path.splitext(
                os.path.basename(hydrography_file))[0]
            hydrography_reproject = temp_directory + os.sep + basename_hydrography + "_reproject" + extension_vector
            hydrography_intersect = temp_directory + os.sep + basename_hydrography + "_intersect" + extension_vector
            hydrography_ready = temp_directory + os.sep + "eau" + extension_vector
            column = "REGIME"
            expression = "REGIME LIKE 'Permanent'"
            # Reprojection to Lambert-93
            updateProjection(hydrography_file, hydrography_reproject, 2154,
                             format_vector)
            # Select the hydrography features inside the study area
            intersectVector(emprise_file, hydrography_reproject,
                            hydrography_intersect, format_vector)
            # Keep only the permanent water bodies
            ret = filterSelectDataVector(hydrography_intersect,
                                         hydrography_ready, column,
                                         expression, format_vector)
            if not ret:
                raise NameError(
                    cyan + "preparationVecteurs : " + bold + red +
                    "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                    % (expression) + endC)
            # Add and fill the 'ID' field
            addNewFieldVector(hydrography_ready, field_name, field_type, 0,
                              None, None, format_vector)
            updateIndexVector(hydrography_ready, index_name=field_name)

            # RPG file processing
            print(bold + cyan + "    Traitements du fichier RPG '%s' :" %
                  (rpg_file) + endC)
            basename_RPG = os.path.splitext(os.path.basename(rpg_file))[0]
            RPG_reproject = temp_directory + os.sep + basename_RPG + "_reproject" + extension_vector
            RPG_ready = temp_directory + os.sep + "RPG" + extension_vector
            # Reprojection to Lambert-93
            updateProjection(rpg_file, RPG_reproject, 2154, format_vector)
            # Select the RPG features inside the study area
            intersectVector(emprise_file, RPG_reproject, RPG_ready,
                            format_vector)
            # Add and fill the 'ID' field
            addNewFieldVector(RPG_ready, field_name, field_type, 0, None,
                              None, format_vector)
            updateIndexVector(RPG_ready, index_name=field_name)

        if indicators_method == "SI_seuillage":
            # BD TOPO road layers processing: NOT IMPLEMENTED (this method
            # was not retained). Expected steps were:
            #   1 - reproject the files to Lambert-93;
            #   2 - select the features inside the study area (intersect,
            #       not clipping);
            #   3 - filter troncon_route features on their NATURE
            #       (motorways, slip roads, 1/2-carriageway roads);
            #   4 - buffer troncon_route features by 'LARGEUR'/2;
            #   5 - merge everything into a single shapefile;
            #   6 - add an 'ID' field; 7 - update it.
            print(bold + cyan + "    Traitements des fichiers routes '%s' :" %
                  str(roads_files_list) + endC)
            print(
                bold +
                "Le script ne peut continuer, le traitements des fichiers routes n'est pas encore entièrement codé"
                + endC)
            # NOTE(review): exits with status 0 although the processing is
            # incomplete — callers cannot distinguish this from success.
            exit(0)

    step = "    Fin de la préparation des données vecteurs : "
    timeLine(path_time_log, step)
    print(bold + yellow + "Fin de la préparation des données vecteurs." + endC)
    print("\n")

    return
def vectorsPreparation(emprise_file, classif_input, grid_input, built_input_list, roads_input_list, grid_output, grid_output_cleaned, built_output, roads_output, col_code_ua, col_item_ua, epsg, path_time_log, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True):
    """
    Prepare the vector inputs of the processing chain: the Urban Atlas grid,
    the merged BD TOPO building layer and the merged BD TOPO road layer are
    reprojected to *epsg*, clipped/selected on *emprise_file*, filtered and
    given an incremented 'ID' field.

    :param emprise_file: study-area extent vector file.
    :param classif_input: classification raster (used for its pixel size).
    :param grid_input: input Urban Atlas vector file.
    :param built_input_list: list of input BD TOPO building vector files.
    :param roads_input_list: list of input BD TOPO road vector files.
    :param grid_output: output grid vector file.
    :param grid_output_cleaned: output grid without water/road polygons.
    :param built_output: output merged building vector file.
    :param roads_output: output merged road vector file.
    :param col_code_ua: name of the Urban Atlas code column (e.g. CODE2012).
    :param col_item_ua: name of the Urban Atlas item column (e.g. ITEM2012).
    :param epsg: target projection EPSG code.
    :param path_time_log: log file receiving the timing steps.
    :param format_vector: OGR vector format (default 'ESRI Shapefile').
    :param extension_vector: vector file extension (default ".shp").
    :param save_results_intermediate: keep the temporary folders if True.
    :param overwrite: recompute the outputs even if they already exist.
    :raises NameError: if an SQL filtering expression fails.
    """

    print(bold + yellow + "Début de la préparation des fichiers vecteurs." + endC + "\n")
    timeLine(path_time_log, "Début de la préparation des fichiers vecteurs : ")

    if debug >= 3 :
        print(bold + green + "vectorsPreparation() : Variables dans la fonction" + endC)
        print(cyan + "vectorsPreparation() : " + endC + "emprise_file : " + str(emprise_file) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "classif_input : " + str(classif_input) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "grid_input : " + str(grid_input) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "built_input_list : " + str(built_input_list) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "roads_input_list : " + str(roads_input_list) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "grid_output : " + str(grid_output) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "grid_output_cleaned : " + str(grid_output_cleaned) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "built_output : " + str(built_output) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "roads_output : " + str(roads_output) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "col_code_ua : " + str(col_code_ua) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "col_item_ua : " + str(col_item_ua) + endC)
        # Fix: the epsg line was missing the endC color reset
        print(cyan + "vectorsPreparation() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "vectorsPreparation() : " + endC + "overwrite : " + str(overwrite) + endC)

    FOLDER_TEMP = 'TEMP'
    SUFFIX_VECTOR_REPROJECT = '_reproject'
    SUFFIX_VECTOR_INTERSECT = '_intersect'
    SUFFIX_VECTOR_MERGE = '_merge'
    SUFFIX_VECTOR_SELECT = '_select'

    if not os.path.exists(grid_output) or not os.path.exists(built_output) or not os.path.exists(roads_output) or overwrite:

        ############################################
        ### General preparation of the processing ###
        ############################################

        path_grid_temp = os.path.dirname(grid_output) + os.sep + FOLDER_TEMP
        path_built_temp = os.path.dirname(built_output) + os.sep + FOLDER_TEMP
        path_roads_temp = os.path.dirname(roads_output) + os.sep + FOLDER_TEMP

        # Start each run from clean temporary folders
        if os.path.exists(path_grid_temp):
            shutil.rmtree(path_grid_temp)
        if os.path.exists(path_built_temp):
            shutil.rmtree(path_built_temp)
        if os.path.exists(path_roads_temp):
            shutil.rmtree(path_roads_temp)

        if not os.path.exists(path_grid_temp):
            os.mkdir(path_grid_temp)
        if not os.path.exists(path_built_temp):
            os.mkdir(path_built_temp)
        if not os.path.exists(path_roads_temp):
            os.mkdir(path_roads_temp)

        basename_grid = os.path.splitext(os.path.basename(grid_output))[0]
        basename_built = os.path.splitext(os.path.basename(built_output))[0]
        basename_roads = os.path.splitext(os.path.basename(roads_output))[0]

        # Variables for the ID column addition
        field_name = 'ID' # Warning! Name hard-coded in the indicator scripts, not in the final script
        field_type = ogr.OFTInteger

        ##############################################
        ### Processing of the Urban Atlas vector   ###
        ##############################################

        if not os.path.exists(grid_output) or overwrite :

            # Fix: forward the caller's vector format (was using the default)
            if os.path.exists(grid_output):
                removeVectorFile(grid_output, format_vector)
            if os.path.exists(grid_output_cleaned):
                removeVectorFile(grid_output_cleaned, format_vector)

            # Projection update
            grid_reproject = path_grid_temp + os.sep + basename_grid + SUFFIX_VECTOR_REPROJECT + extension_vector
            updateProjection(grid_input, grid_reproject, projection=epsg, format_vector=format_vector)

            # Clip the input Urban Atlas file to the study area
            grid_output_temp = os.path.splitext(grid_output)[0] + "_temp" + extension_vector
            cutVector(emprise_file, grid_reproject, grid_output_temp, overwrite, format_vector)

            # Remove the very small polygons that introduce NaN values
            pixel_size = getPixelSizeImage(classif_input)
            min_size_area = pixel_size * 2
            cleanMiniAreaPolygons(grid_output_temp, grid_output, min_size_area, '', format_vector)
            if not save_results_intermediate:
                if os.path.exists(grid_output_temp):
                    removeVectorFile(grid_output_temp, format_vector)

            # Add and fill an 'ID' field
            addNewFieldVector(grid_output, field_name, field_type, 0, None, None, format_vector)
            updateIndexVector(grid_output, field_name, format_vector)

            # Remove the water and road polygons (only for the indicators computation)
            column = "'%s, %s, %s'" % (field_name, col_code_ua, col_item_ua)
            expression = "%s NOT IN ('12210', '12220', '12230', '50000')" % (col_code_ua)
            ret = filterSelectDataVector(grid_output, grid_output_cleaned, column, expression, format_vector)
            if not ret :
                raise NameError (cyan + "vectorsPreparation : " + bold + red  + "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte" %(expression) + endC)

        #########################################
        ### Processing of the building vectors ###
        #########################################
        if not os.path.exists(built_output) or overwrite :

            if os.path.exists(built_output):
                # Fix: forward the caller's vector format
                removeVectorFile(built_output, format_vector)

            # Projection update
            built_reproject_list=[]
            for built_input in built_input_list:
                built_reproject = path_built_temp + os.sep + os.path.splitext(os.path.basename(built_input))[0] + SUFFIX_VECTOR_REPROJECT + extension_vector
                updateProjection(built_input, built_reproject, projection=epsg, format_vector=format_vector)
                built_reproject_list.append(built_reproject)

            # Select the building features inside the study area
            built_intersect_list = []
            for built_reproject in built_reproject_list:
                built_intersect = path_built_temp + os.sep + os.path.splitext(os.path.basename(built_reproject))[0] + SUFFIX_VECTOR_INTERSECT + extension_vector
                intersectVector(emprise_file, built_reproject, built_intersect, format_vector)
                built_intersect_list.append(built_intersect)

            # Merge the BD TOPO building layers
            built_merge = path_built_temp + os.sep + basename_built + SUFFIX_VECTOR_MERGE + extension_vector
            built_select = path_built_temp + os.sep + basename_built + SUFFIX_VECTOR_SELECT + extension_vector
            # Fix: forward the caller's vector format to the merge
            fusionVectors(built_intersect_list, built_merge, format_vector)

            # Remove polygons whose building height is 0
            column = "HAUTEUR"
            expression = "HAUTEUR > 0"
            ret = filterSelectDataVector(built_merge, built_select, column, expression, format_vector)
            if not ret :
                raise NameError (cyan + "vectorsPreparation : " + bold + red  + "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte" %(expression) + endC)

            # Clip the input buildings to the study area
            cutVector(emprise_file, built_select, built_output, overwrite, format_vector)

            # Add and fill an 'ID' field
            addNewFieldVector(built_output, field_name, field_type, 0, None, None, format_vector)
            updateIndexVector(built_output, field_name, format_vector)

        ###########################################
        ### Processing of the road vectors      ###
        ###########################################

        if not os.path.exists(roads_output) or overwrite :

            if os.path.exists(roads_output):
                # Fix: forward the caller's vector format
                removeVectorFile(roads_output, format_vector)

            # Projection update
            roads_reproject_list=[]
            for roads_input in roads_input_list:
                roads_reproject = path_roads_temp + os.sep + os.path.splitext(os.path.basename(roads_input))[0] + SUFFIX_VECTOR_REPROJECT + extension_vector
                updateProjection(roads_input, roads_reproject, projection=epsg, format_vector=format_vector)
                roads_reproject_list.append(roads_reproject)

            # Select the road features inside the study area
            roads_intersect_list = []
            for roads_reproject in roads_reproject_list:
                roads_intersect = path_roads_temp + os.sep + os.path.splitext(os.path.basename(roads_reproject))[0] + SUFFIX_VECTOR_INTERSECT + extension_vector
                intersectVector(emprise_file, roads_reproject, roads_intersect, format_vector)
                roads_intersect_list.append(roads_intersect)

            # Merge the BD TOPO road layers
            roads_merge = path_roads_temp + os.sep + basename_roads + SUFFIX_VECTOR_MERGE + extension_vector
            roads_select = path_roads_temp + os.sep + basename_roads + SUFFIX_VECTOR_SELECT + extension_vector
            # Fix: forward the caller's vector format to the merge
            fusionVectors(roads_intersect_list, roads_merge, format_vector)

            # Select the features by road nature in the BD TOPO road layer
            column = "NATURE"
            expression = "NATURE IN ('Autoroute', 'Bretelle', 'Quasi-autoroute', 'Route  1 chausse', 'Route  2 chausses', 'Route a 1 chaussee', 'Route a 2 chaussees', 'Route à 1 chaussée', 'Route à 2 chaussées')"
            ret = filterSelectDataVector (roads_merge, roads_select, column, expression, format_vector)
            if not ret :
                raise NameError (cyan + "vectorsPreparation : " + bold + red  + "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte" %(expression) + endC)

            # Clip the input roads to the study area
            cutVectorAll(emprise_file, roads_select, roads_output, overwrite, format_vector)

            # Add and fill an 'ID' field
            addNewFieldVector(roads_output, field_name, field_type, 0, None, None, format_vector)
            updateIndexVector(roads_output, field_name, format_vector)

        ##########################################
        ### Cleanup of the temporary files     ###
        ##########################################

        if not save_results_intermediate:
            if os.path.exists(path_grid_temp):
                shutil.rmtree(path_grid_temp)
            if os.path.exists(path_built_temp):
                shutil.rmtree(path_built_temp)
            if os.path.exists(path_roads_temp):
                shutil.rmtree(path_roads_temp)

    else:
        print(bold + magenta + "La préparation des fichiers vecteurs a déjà eu lieu.\n" + endC)

    print(bold + yellow + "Fin de la préparation des fichiers vecteurs.\n" + endC)
    timeLine(path_time_log, "Fin de la préparation des fichiers vecteurs : ")

    return
def computeMajorityClass(input_grid, temp_directory, nodata_field, built_field,
                         mineral_field, baresoil_field, water_field,
                         vegetation_field, high_vegetation_field,
                         low_vegetation_field, maj_ocs_field, veg_mean_field,
                         class_label_dico_out, format_vector, extension_vector,
                         overwrite):
    """
    Compute the majority land-cover class of each polygon of *input_grid*
    and write it into the *maj_ocs_field* field (the grid file is updated
    in place).

    Polygons are split into five classes from the 'majority' field and the
    mean vegetation height field *veg_mean_field*:
      - MAJ_OTHERS_CLASS   : nodata / built / mineral / water majority;
      - MAJ_BARESOIL_CLASS : bare-soil majority;
      - MAJ_LOW_VEG_CLASS  : vegetation majority, mean height < 1;
      - MAJ_MED_VEG_CLASS  : vegetation majority, 1 <= mean height < 5;
      - MAJ_HIGH_VEG_CLASS : vegetation majority, mean height >= 5.

    :param input_grid: input (and output) grid vector file.
    :param temp_directory: directory for the per-class temporary layers.
    :param nodata_field, built_field, mineral_field, baresoil_field,
        water_field, vegetation_field, high_vegetation_field,
        low_vegetation_field: possible values of the 'majority' field.
    :param maj_ocs_field: name of the output majority-class field.
    :param veg_mean_field: name of the mean vegetation height field.
    :param class_label_dico_out: dictionary mapping class keys to labels.
    :param format_vector: OGR vector format.
    :param extension_vector: vector file extension.
    :param overwrite: overwrite flag forwarded to the filtering calls.
    :return: 0 on success.
    :raises NameError: if an SQL filtering expression fails.
    """

    SUFFIX_CLASS = '_class'
    FIELD_TYPE = ogr.OFTInteger
    FIELD_NAME_MAJORITY = 'majority'

    temp_class_list = []

    base_name = os.path.splitext(os.path.basename(input_grid))[0]
    temp_grid = temp_directory + os.sep + base_name + SUFFIX_CLASS + extension_vector

    ### Retrieval of the majority class

    if debug >= 3:
        print(cyan + "computeMajorityClass() : " + endC + bold +
              "Récupération de la classe majoritaire." + endC + '\n')

    addNewFieldVector(input_grid,
                      maj_ocs_field,
                      FIELD_TYPE,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)

    # Quoted, comma-separated list of all attribute names (columns kept by
    # the SQL filter)
    attr_names_list = getAttributeNameList(input_grid,
                                           format_vector=format_vector)
    attr_names_list_str = "'" + ", ".join(attr_names_list) + "'"

    def _extractClass(expression, class_index, label):
        # One temporary layer per class: filter the grid with the SQL
        # expression, stamp the class label into maj_ocs_field, and queue
        # the layer for the final merge.
        temp_class = temp_directory + os.sep + base_name + SUFFIX_CLASS + str(class_index) + extension_vector
        ret = filterSelectDataVector(input_grid,
                                     temp_class,
                                     attr_names_list_str,
                                     expression,
                                     overwrite=overwrite,
                                     format_vector=format_vector)
        # Fix: the return value was previously ignored — fail loudly on a
        # bad SQL expression, consistently with the other functions.
        if not ret:
            raise NameError(
                cyan + "computeMajorityClass : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)
        updateFieldVector(temp_class,
                          field_name=maj_ocs_field,
                          value=label,
                          format_vector=format_vector)
        temp_class_list.append(temp_class)

    # Class 0: nodata / built / mineral / water majority
    expression = "%s = '%s' OR %s = '%s' OR %s = '%s' OR %s = '%s'" % (
        FIELD_NAME_MAJORITY, nodata_field, FIELD_NAME_MAJORITY, built_field,
        FIELD_NAME_MAJORITY, mineral_field, FIELD_NAME_MAJORITY, water_field)
    _extractClass(expression, 0, class_label_dico_out["MAJ_OTHERS_CLASS"])

    # Class 1: bare-soil majority
    expression = "%s = '%s'" % (FIELD_NAME_MAJORITY, baresoil_field)
    _extractClass(expression, 1, class_label_dico_out["MAJ_BARESOIL_CLASS"])

    # Class 2: vegetation majority with mean height < 1
    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s < 1)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field)
    _extractClass(expression, 2, class_label_dico_out["MAJ_LOW_VEG_CLASS"])

    # Class 3: vegetation majority with 1 <= mean height < 5
    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 1 AND %s < 5)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field, veg_mean_field)
    _extractClass(expression, 3, class_label_dico_out["MAJ_MED_VEG_CLASS"])

    # Class 4: vegetation majority with mean height >= 5
    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 5)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field)
    _extractClass(expression, 4, class_label_dico_out["MAJ_HIGH_VEG_CLASS"])

    # Merge the per-class layers and replace the input grid with the result
    fusionVectors(temp_class_list, temp_grid, format_vector=format_vector)
    removeVectorFile(input_grid, format_vector=format_vector)
    copyVectorFile(temp_grid, input_grid, format_vector=format_vector)

    return 0
def classesOfWaterHeights(input_flooded_areas_vector,
                          input_digital_elevation_model_file,
                          output_heights_classes_file,
                          output_heights_classes_vector,
                          heights_classes='0,0.5,1,1.5,2',
                          epsg=2154,
                          no_data_value=0,
                          format_raster='GTiff',
                          format_vector='ESRI Shapefile',
                          extension_raster='.tif',
                          extension_vector='.shp',
                          grass_gisbase=None,
                          grass_gisdb='GRASS_database',
                          grass_location='LOCATION',
                          grass_mapset='MAPSET',
                          path_time_log='',
                          save_results_intermediate=False,
                          overwrite=True):
    """
    Compute water-height classes over a flooded area.

    Method (6 steps): densify the flood perimeter into points, sample the DEM
    altitude under each point, triangulate (Delaunay) those altitudes to get a
    water-surface raster, subtract the DEM from that surface to obtain raw
    water heights, bin the heights into the classes given by
    ``heights_classes``, and optionally vectorize the classified raster.

    Parameters
    ----------
    input_flooded_areas_vector : str
        Input vector file of the flooded area (polygons).
    input_digital_elevation_model_file : str
        Input digital elevation model (DEM) raster file.
    output_heights_classes_file : str
        Output classified heights raster. May be "" — it is then derived from
        ``output_heights_classes_vector`` (same directory and basename).
    output_heights_classes_vector : str
        Output classified heights vector. "" skips the vectorization step.
    heights_classes : str
        Comma-separated class thresholds (heights), e.g. '0,0.5,1,1.5,2'.
    epsg : int
        EPSG code of the working projection.
    no_data_value : int
        Nodata value written to output rasters.
    format_raster, format_vector : str
        GDAL / OGR driver names for raster and vector outputs.
    extension_raster, extension_vector : str
        File extensions for raster and vector outputs.
    grass_gisbase : str or None
        GRASS installation directory. When None (default), read from the
        GISBASE environment variable at call time.
    grass_gisdb, grass_location, grass_mapset : str
        GRASS database / location / mapset used for the GRASS steps.
    path_time_log : str
        Log file used by timeLine() ("" disables logging).
    save_results_intermediate : bool
        When True, keep the temporary working directory.
    overwrite : bool
        Allow overwriting of existing outputs.

    Returns
    -------
    None. Results are written to the output file paths.

    Raises
    ------
    KeyError
        If ``grass_gisbase`` is None and the GISBASE environment variable is
        not set.
    """

    # Bug fix: the previous default argument 'os.environ['GISBASE']' was
    # evaluated at function-definition time (i.e. at module import), so a
    # missing GISBASE variable raised KeyError even when the caller supplied
    # an explicit grass_gisbase. Resolve the environment variable lazily,
    # only when the default is actually needed.
    if grass_gisbase is None:
        grass_gisbase = os.environ['GISBASE']

    if debug >= 3:
        print('\n' + bold + green +
              "Classes de hauteurs d'eau - Variables dans la fonction :" +
              endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "input_flooded_areas_vector : " +
              str(input_flooded_areas_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "input_digital_elevation_model_file : " +
              str(input_digital_elevation_model_file) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "output_heights_classes_file : " +
              str(output_heights_classes_file) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "output_heights_classes_vector : " +
              str(output_heights_classes_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "heights_classes : " + str(heights_classes) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_gisbase : " + str(grass_gisbase) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_gisdb : " + str(grass_gisdb) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_location : " + str(grass_location) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "grass_mapset : " + str(grass_mapset) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    classesOfWaterHeights() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "    classesOfWaterHeights() : " + endC + "overwrite : " +
              str(overwrite) + endC + '\n')

    # Constants: raster encodings, SAGA grid extension, GRASS-safe vector
    # format name (no spaces), temp-file suffixes and field names.
    ENCODING_RASTER_FLOAT = 'float'
    ENCODING_RASTER_UINT8 = 'uint8'
    EXTENSION_RASTER_SAGA = '.sdat'
    FORMAT_VECTOR_GRASS = format_vector.replace(' ', '_')
    SUFFIX_TEMP = '_temp'
    SUFFIX_LINES = '_lines'
    SUFFIX_POINTS = '_points'
    SUFFIX_ALTI = '_altitude'
    SUFFIX_CUT = '_cut'
    SUFFIX_RAW = '_raw_heights'
    INDEX_FIELD = 'idx'
    ALTI_FIELD = 'altitude'
    VECTORISATION = 'GRASS'

    # Log the start of processing
    starting_event = "classesOfWaterHeights() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "DEBUT DES TRAITEMENTS" + endC + '\n')

    # Basenames of inputs/outputs; the output directory and basename come
    # from the raster output when given, otherwise from the vector output.
    flooded_areas_basename = os.path.splitext(
        os.path.basename(input_flooded_areas_vector))[0]
    digital_elevation_model_basename = os.path.splitext(
        os.path.basename(input_digital_elevation_model_file))[0]
    flooded_areas_lines_basename = flooded_areas_basename + SUFFIX_LINES
    flooded_areas_points_basename = flooded_areas_basename + SUFFIX_POINTS
    if output_heights_classes_file != "":
        output_heights_classes_basename = os.path.splitext(
            os.path.basename(output_heights_classes_file))[0]
        output_dirname = os.path.dirname(output_heights_classes_file)
    else:
        output_heights_classes_basename = os.path.splitext(
            os.path.basename(output_heights_classes_vector))[0]
        output_dirname = os.path.dirname(output_heights_classes_vector)

    # Temporary working files (all under one temp directory, removed at the
    # end unless save_results_intermediate is True).
    temp_directory = output_dirname + os.sep + output_heights_classes_basename + SUFFIX_TEMP
    flooded_areas_lines = temp_directory + os.sep + flooded_areas_lines_basename + extension_vector
    flooded_areas_points = temp_directory + os.sep + flooded_areas_points_basename + extension_vector
    altitude_points = temp_directory + os.sep + flooded_areas_points_basename + SUFFIX_ALTI + extension_vector
    altitude_grid = temp_directory + os.sep + flooded_areas_basename + SUFFIX_ALTI + EXTENSION_RASTER_SAGA
    altitude_file = temp_directory + os.sep + flooded_areas_basename + SUFFIX_ALTI + SUFFIX_CUT + extension_raster
    digital_elevation_model_cut = temp_directory + os.sep + digital_elevation_model_basename + SUFFIX_CUT + extension_raster
    raw_heights = temp_directory + os.sep + flooded_areas_basename + SUFFIX_RAW + extension_raster
    heights_classes_temp = temp_directory + os.sep + output_heights_classes_basename + extension_raster
    if output_heights_classes_file == "":
        output_heights_classes_file = output_dirname + os.sep + output_heights_classes_basename + extension_raster

    # Clean up results from any previous run
    if debug >= 3:
        print(cyan + "classesOfWaterHeights() : " + endC +
              "Nettoyage des traitements précédents." + endC + '\n')
    removeFile(output_heights_classes_file)
    removeVectorFile(output_heights_classes_vector,
                     format_vector=format_vector)
    cleanTempData(temp_directory)

    #############
    # Step 0/6 # Preparation of the processing
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 0/6 - Début de la préparation des traitements." + endC + '\n')

    # Initialize the GRASS session on the DEM footprint and resolution
    xmin, xmax, ymin, ymax = getEmpriseImage(
        input_digital_elevation_model_file)
    pixel_width, pixel_height = getPixelWidthXYImage(
        input_digital_elevation_model_file)
    grass_gisbase, grass_gisdb, grass_location, grass_mapset = initializeGrass(
        temp_directory,
        xmin,
        xmax,
        ymin,
        ymax,
        pixel_width,
        pixel_height,
        projection=epsg,
        gisbase=grass_gisbase,
        gisdb=grass_gisdb,
        location=grass_location,
        mapset=grass_mapset,
        clean_old=True,
        overwrite=overwrite)

    # Parse and sort the water-height class thresholds
    thresholds_list = heights_classes.split(',')
    thresholds_list_float = [float(x) for x in thresholds_list]
    thresholds_list_float.sort()
    thresholds_list_float_len = len(thresholds_list_float)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 0/6 - Fin de la préparation des traitements." + endC + '\n')

    #############
    # Step 1/6 # Create points along the flooded-area perimeter
    #############

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 1/6 - Début de la création de points sur le périmètre de l'emprise inondée."
        + endC + '\n')

    # Convert the flooded-area polygons to polylines (the perimeter)
    convertePolygon2Polylines(input_flooded_areas_vector,
                              flooded_areas_lines,
                              overwrite=overwrite,
                              format_vector=format_vector)

    # Densify points along the polylines (GRASS v.to.points: one point per
    # vertex, with a maximum spacing of 'dmax' map units).
    use = 'vertex'
    dmax = 10
    percent = False
    importVectorOgr2Grass(flooded_areas_lines,
                          flooded_areas_lines_basename,
                          overwrite=overwrite)
    pointsAlongPolylines(flooded_areas_lines_basename,
                         flooded_areas_points_basename,
                         use=use,
                         dmax=dmax,
                         percent=percent,
                         overwrite=overwrite)
    exportVectorOgr2Grass(flooded_areas_points_basename,
                          flooded_areas_points,
                          format_vector=FORMAT_VECTOR_GRASS,
                          overwrite=overwrite)

    # Add a unique index on the points
    addNewFieldVector(flooded_areas_points,
                      INDEX_FIELD,
                      ogr.OFTInteger,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)
    updateIndexVector(flooded_areas_points,
                      index_name=INDEX_FIELD,
                      format_vector=format_vector)

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 1/6 - Fin de la création de points sur le périmètre de l'emprise inondée."
        + endC + '\n')

    #############
    # Step 2/6 # Sample the altitude under each point
    #############

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 2/6 - Début de la récupération de l'altitude sous chaque point."
        + endC + '\n')

    # Add a field to receive the sampled altitude
    addNewFieldVector(flooded_areas_points,
                      ALTI_FIELD,
                      ogr.OFTReal,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)

    # Sample the DEM under the perimeter points (GRASS)
    importVectorOgr2Grass(flooded_areas_points,
                          flooded_areas_points_basename,
                          overwrite=overwrite)
    importRasterGdal2Grass(input_digital_elevation_model_file,
                           digital_elevation_model_basename,
                           overwrite=overwrite)
    sampleRasterUnderPoints(flooded_areas_points_basename,
                            digital_elevation_model_basename,
                            ALTI_FIELD,
                            overwrite=overwrite)
    exportVectorOgr2Grass(flooded_areas_points_basename,
                          altitude_points,
                          format_vector=FORMAT_VECTOR_GRASS,
                          overwrite=overwrite)

    print(
        cyan + "classesOfWaterHeights() : " + bold + green +
        "ETAPE 2/6 - Fin de la récupération de l'altitude sous chaque point." +
        endC + '\n')

    #############
    # Step 3/6 # Triangulate the altitudes (water surface)
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 3/6 - Début de la triangulation de l'altitude." + endC + '\n')

    # Delaunay triangulation of the perimeter altitudes, rasterized at the
    # DEM resolution (smallest absolute pixel dimension).
    pixel_size = abs(min(pixel_width, pixel_height))
    triangulationDelaunay(altitude_points,
                          altitude_grid,
                          ALTI_FIELD,
                          cellsize=pixel_size)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 3/6 - Fin de la triangulation de l'altitude." + endC + '\n')

    #############
    # Step 4/6 # Compute the raw water heights
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 4/6 - Début du calcul des hauteurs brutes." + endC + '\n')

    # Clip both the triangulated surface and the DEM to the flooded area
    cutRasterImages([altitude_grid, input_digital_elevation_model_file],
                    input_flooded_areas_vector,
                    [altitude_file, digital_elevation_model_cut],
                    0,
                    0,
                    epsg,
                    no_data_value,
                    "",
                    False,
                    path_time_log,
                    format_raster=format_raster,
                    format_vector=format_vector,
                    extension_raster=extension_raster,
                    extension_vector=extension_vector,
                    save_results_intermediate=save_results_intermediate,
                    overwrite=overwrite)

    # Raw heights = triangulated water surface - DEM
    expression = "im1b1 - im2b1"
    rasterCalculator([altitude_file, digital_elevation_model_cut],
                     raw_heights,
                     expression,
                     codage=ENCODING_RASTER_FLOAT)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 4/6 - Fin du calcul des hauteurs brutes." + endC + '\n')

    #############
    # Step 5/6 # Assign the water-height classes
    #############

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 5/6 - Début de l'attribution des classes de hauteurs d'eau." +
          endC + '\n')

    # Build the nested-ternary BandMath expression: class i+1 for heights in
    # [threshold_i, threshold_i+1), the last class above the last threshold,
    # and 0 below the first threshold.
    expression = ""
    for i in range(thresholds_list_float_len - 1):
        min_threshold = thresholds_list_float[i]
        max_threshold = thresholds_list_float[i + 1]
        expression += "im1b1>=%s and im1b1<%s ? %s : " % (min_threshold,
                                                          max_threshold, i + 1)
    expression += "im1b1>=%s ? %s : 0" % (thresholds_list_float[-1],
                                          thresholds_list_float_len)

    # Classify the raw heights
    rasterCalculator([raw_heights],
                     heights_classes_temp,
                     expression,
                     codage=ENCODING_RASTER_UINT8)

    # Clean clip of the classified raster to the flooded area
    cutImageByVector(input_flooded_areas_vector,
                     heights_classes_temp,
                     output_heights_classes_file,
                     pixel_size_x=pixel_width,
                     pixel_size_y=pixel_height,
                     no_data_value=no_data_value,
                     epsg=epsg,
                     format_raster=format_raster,
                     format_vector=format_vector)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "ETAPE 5/6 - Fin de l'attribution des classes de hauteurs d'eau." +
          endC + '\n')

    #############
    # Step 6/6 # Vectorize the water-height classes (optional)
    #############

    if output_heights_classes_vector != "":

        print(
            cyan + "classesOfWaterHeights() : " + bold + green +
            "ETAPE 6/6 - Début de la vectorisation des classes de hauteurs d'eau."
            + endC + '\n')

        name_column = 'class'
        umc_list = 0

        if VECTORISATION == 'GRASS':
            vectorizeGrassClassification(
                output_heights_classes_file,
                output_heights_classes_vector,
                name_column, [umc_list],
                False,
                True,
                True,
                input_flooded_areas_vector,
                True,
                path_time_log,
                expression="",
                format_vector=format_vector,
                extension_raster=extension_raster,
                extension_vector=extension_vector,
                save_results_intermediate=save_results_intermediate,
                overwrite=overwrite)
        else:
            vectorizeClassification(
                output_heights_classes_file,
                output_heights_classes_vector,
                name_column, [umc_list],
                2000,
                False,
                True,
                True,
                True,
                True,
                True,
                input_flooded_areas_vector,
                True,
                False,
                False, [0],
                path_time_log,
                expression="",
                format_vector=format_vector,
                extension_raster=extension_raster,
                extension_vector=extension_vector,
                save_results_intermediate=save_results_intermediate,
                overwrite=overwrite)

        print(
            cyan + "classesOfWaterHeights() : " + bold + green +
            "ETAPE 6/6 - Fin de la vectorisation des classes de hauteurs d'eau."
            + endC + '\n')

    else:
        print(
            cyan + "classesOfWaterHeights() : " + bold + yellow +
            "ETAPE 6/6 - Pas de vectorisation des classes de hauteurs d'eau demandée."
            + endC + '\n')

    # Remove temporary files unless asked to keep them
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "classesOfWaterHeights() : " + endC +
                  "Suppression des fichiers temporaires." + endC + '\n')
        deleteDir(temp_directory)

    print(cyan + "classesOfWaterHeights() : " + bold + green +
          "FIN DES TRAITEMENTS" + endC + '\n')

    # Log the end of processing
    ending_event = "classesOfWaterHeights() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return
Exemple #6
0
def createMnh(image_mns_input, image_mnt_input, image_threshold_input, vector_emprise_input, image_mnh_output, automatic, bd_road_vector_input_list, bd_road_buff_list, sql_road_expression_list, bd_build_vector_input_list, height_bias, threshold_bd_value, threshold_delta_h, mode_interpolation, method_interpolation, interpolation_bco_radius, simplify_vector_param, epsg, no_data_value, ram_otb, path_time_log, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    # Mise à jour du Log
    starting_event = "createMnh() : MNH creation starting : "
    timeLine(path_time_log,starting_event)

    print(endC)
    print(bold + green + "## START : MNH CREATION" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green + "createMnh() : Variables dans la fonction" + endC)
        print(cyan + "createMnh() : " + endC + "image_mns_input : " + str(image_mns_input) + endC)
        print(cyan + "createMnh() : " + endC + "image_mnt_input : " + str(image_mnt_input) + endC)
        print(cyan + "createMnh() : " + endC + "image_threshold_input : " + str(image_threshold_input) + endC)
        print(cyan + "createMnh() : " + endC + "vector_emprise_input : " + str(vector_emprise_input) + endC)
        print(cyan + "createMnh() : " + endC + "image_mnh_output : " + str(image_mnh_output) + endC)
        print(cyan + "createMnh() : " + endC + "automatic : " + str(automatic) + endC)
        print(cyan + "createMnh() : " + endC + "bd_road_vector_input_list : " + str(bd_road_vector_input_list) + endC)
        print(cyan + "createMnh() : " + endC + "bd_road_buff_list : " + str(bd_road_buff_list) + endC)
        print(cyan + "createMnh() : " + endC + "sql_road_expression_list : " + str(sql_road_expression_list) + endC)
        print(cyan + "createMnh() : " + endC + "bd_build_vector_input_list : " + str(bd_build_vector_input_list) + endC)
        print(cyan + "createMnh() : " + endC + "height_bias : " + str(height_bias) + endC)
        print(cyan + "createMnh() : " + endC + "threshold_bd_value : " + str(threshold_bd_value) + endC)
        print(cyan + "createMnh() : " + endC + "threshold_delta_h : " + str(threshold_delta_h) + endC)
        print(cyan + "createMnh() : " + endC + "mode_interpolation : " + str(mode_interpolation) + endC)
        print(cyan + "createMnh() : " + endC + "method_interpolation : " + str(method_interpolation) + endC)
        print(cyan + "createMnh() : " + endC + "interpolation_bco_radius : " + str(interpolation_bco_radius) + endC)
        print(cyan + "createMnh() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC)
        print(cyan + "createMnh() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "createMnh() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "createMnh() : " + endC + "ram_otb : " + str(ram_otb) + endC)
        print(cyan + "createMnh() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMnh() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "createMnh() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "createMnh() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "createMnh() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createMnh() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMnh() : " + endC + "overwrite : " + str(overwrite) + endC)

    # LES CONSTANTES
    PRECISION = 0.0000001

    CODAGE_8B = "uint8"
    CODAGE_F = "float"

    SUFFIX_CUT = "_cut"
    SUFFIX_CLEAN = "_clean"
    SUFFIX_SAMPLE = "_sample"
    SUFFIX_MASK = "_mask"
    SUFFIX_TMP = "_tmp"
    SUFFIX_MNS = "_mns"
    SUFFIX_MNT = "_mnt"
    SUFFIX_ROAD = "_road"
    SUFFIX_BUILD = "_build"
    SUFFIX_RASTER = "_raster"
    SUFFIX_VECTOR = "_vector"

    # DEFINIR LES REPERTOIRES ET FICHIERS TEMPORAIRES
    repertory_output = os.path.dirname(image_mnh_output)
    basename_mnh = os.path.splitext(os.path.basename(image_mnh_output))[0]

    sub_repertory_raster_temp = repertory_output + os.sep + basename_mnh + SUFFIX_RASTER + SUFFIX_TMP
    sub_repertory_vector_temp = repertory_output + os.sep + basename_mnh + SUFFIX_VECTOR + SUFFIX_TMP
    cleanTempData(sub_repertory_raster_temp)
    cleanTempData(sub_repertory_vector_temp)

    basename_vector_emprise = os.path.splitext(os.path.basename(vector_emprise_input))[0]
    basename_mns_input = os.path.splitext(os.path.basename(image_mns_input))[0]
    basename_mnt_input = os.path.splitext(os.path.basename(image_mnt_input))[0]

    image_mnh_tmp = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_TMP + extension_raster
    image_mnh_road = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_ROAD + extension_raster

    vector_bd_bati_temp = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_BUILD + SUFFIX_TMP + extension_vector
    vector_bd_bati = repertory_output + os.sep + basename_mnh + SUFFIX_BUILD + extension_vector
    raster_bd_bati = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_BUILD + extension_raster
    removeVectorFile(vector_bd_bati)

    image_emprise_mnt_mask = sub_repertory_raster_temp + os.sep + basename_vector_emprise + SUFFIX_MNT + extension_raster
    image_mnt_cut = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CUT + extension_raster
    image_mnt_clean = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CLEAN + extension_raster
    image_mnt_clean_sample = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CLEAN + SUFFIX_SAMPLE + extension_raster
    image_emprise_mns_mask = sub_repertory_raster_temp + os.sep + basename_vector_emprise + SUFFIX_MNS + extension_raster
    image_mns_cut = sub_repertory_raster_temp + os.sep + basename_mns_input + SUFFIX_CUT + extension_raster
    image_mns_clean = sub_repertory_raster_temp + os.sep + basename_mns_input + SUFFIX_CLEAN + extension_raster

    vector_bd_road_temp = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_ROAD + SUFFIX_TMP + extension_vector
    raster_bd_road_mask = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_ROAD + SUFFIX_MASK + extension_raster

    if image_threshold_input != "" :
        basename_threshold_input = os.path.splitext(os.path.basename(image_threshold_input))[0]
        image_threshold_cut = sub_repertory_raster_temp + os.sep + basename_threshold_input + SUFFIX_CUT + extension_raster
        image_threshold_mask = sub_repertory_raster_temp + os.sep + basename_threshold_input + SUFFIX_MASK + extension_raster

    # VERIFICATION SI LE FICHIER DE SORTIE EXISTE DEJA
    # Si un fichier de sortie avec le même nom existe déjà, et si l'option ecrasement est à false, alors on ne fait rien
    check = os.path.isfile(image_mnh_output)
    if check and not overwrite:
        print(bold + yellow +  "createMnh() : " + endC + "Create mnh %s from %s and %s already done : no actualisation" % (image_mnh_output, image_mns_input, image_mnt_input) + endC)
    # Si non, ou si la fonction ecrasement est désative, alors on le calcule
    else:
        if check:
            try: # Suppression de l'éventuel fichier existant
                removeFile(image_mnh_output)
            except Exception:
                pass # Si le fichier ne peut pas être supprimé, on suppose qu'il n'existe pas et on passe à la suite

        # DECOUPAGE DES FICHIERS MS ET MNT D'ENTREE PAR LE FICHIER D'EMPRISE
        if debug >= 3:
            print(bold + green +  "createMnh() : " + endC + "Decoupage selon l'emprise des fichiers %s et %s " %(image_mns_input, image_mnt_input) + endC)

        # Fonction de découpe du mns
        if not cutImageByVector(vector_emprise_input, image_mns_input, image_mns_cut, None, None, no_data_value, epsg, format_raster, format_vector) :
            raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_mns_input + ". Voir message d'erreur." + endC)

        # Fonction de découpe du mnt
        if not cutImageByVector(vector_emprise_input, image_mnt_input, image_mnt_cut, None, None, no_data_value, epsg, format_raster, format_vector) :
            raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_mnt_input + ". Voir message d'erreur." + endC)

        if debug >= 3:
            print(bold + green +  "createMnh() : " + endC + "Decoupage des fichiers %s et %s complet" %(image_mns_cut, image_mnt_cut) + endC)


        # REBOUCHAGE DES TROUS DANS LE MNT D'ENTREE SI NECESSAIRE

        nodata_mnt = getNodataValueImage(image_mnt_cut)
        pixelNodataCount = countPixelsOfValue(image_mnt_cut, nodata_mnt)

        if pixelNodataCount > 0 :

            if debug >= 3:
                print(bold + green +  "createMnh() : " + endC + "Fill the holes MNT for  %s" %(image_mnt_cut) + endC)

            # Rasterisation du vecteur d'emprise pour creer un masque pour boucher les trous du MNT
            rasterizeBinaryVector(vector_emprise_input, image_mnt_cut, image_emprise_mnt_mask, 1, CODAGE_8B)

            # Utilisation de SAGA pour boucher les trous
            fillNodata(image_mnt_cut, image_emprise_mnt_mask, image_mnt_clean, save_results_intermediate)

            if debug >= 3:
                print(bold + green +  "createMnh() : " + endC + "Fill the holes MNT to %s completed" %(image_mnt_clean) + endC)

        else :
            image_mnt_clean = image_mnt_cut
            if debug >= 3:
                print(bold + green +  "\ncreateMnh() : " + endC + "Fill the holes not necessary MNT for %s" %(image_mnt_cut) + endC)


        # REBOUCHAGE DES TROUS DANS LE MNS D'ENTREE SI NECESSAIRE

        nodata_mns = getNodataValueImage(image_mns_cut)
        pixelNodataCount = countPixelsOfValue(image_mns_cut, nodata_mns)

        if pixelNodataCount > 0 :

            if debug >= 3:
                print(bold + green +  "createMnh() : " + endC + "Fill the holes MNS for  %s" %(image_mns_cut) + endC)

            # Rasterisation du vecteur d'emprise pour creer un masque pour boucher les trous du MNS
            rasterizeBinaryVector(vector_emprise_input, image_mns_cut, image_emprise_mns_mask, 1, CODAGE_8B)

            # Utilisation de SAGA pour boucher les trous
            fillNodata(image_mns_cut, image_emprise_mns_mask, image_mns_clean, save_results_intermediate)

            if debug >= 3:
                print(bold + green +  "\ncreateMnh() : " + endC + "Fill the holes MNS to %s completed" %(image_mns_clean) + endC)

        else :
            image_mns_clean = image_mns_cut
            if debug >= 3:
                print(bold + green +  "createMnh() : " + endC + "Fill the holes not necessary MNS for %s" %(image_mns_cut) + endC)

        # CALLER LE FICHIER MNT AU FORMAT DU FICHIER MNS

        # Commande de mise en place de la geométrie re-echantionage
        command = "otbcli_Superimpose -inr " + image_mns_clean + " -inm " + image_mnt_clean + " -mode " + mode_interpolation + " -interpolator " + method_interpolation + " -out " + image_mnt_clean_sample

        if method_interpolation.lower() == 'bco' :
            command += " -interpolator.bco.radius " + str(interpolation_bco_radius)
        if ram_otb > 0:
            command += " -ram %d" %(ram_otb)

        if debug >= 3:
            print(cyan + "createMnh() : " + bold + green + "Réechantillonage du fichier %s par rapport à la reference %s" %(image_mnt_clean, image_mns_clean) + endC)
            print(command)

        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du superimpose de l'image : " + image_mnt_input + ". Voir message d'erreur." + endC)

        # INCRUSTATION DANS LE MNH DES DONNEES VECTEURS ROUTES

        if debug >= 3:
            print(bold + green +  "createMnh() : " + endC + "Use BD road to clean MNH"  + endC)

        # Creation d'un masque de filtrage des donnes routes (exemple : le NDVI)
        if image_threshold_input != "" :
            if not cutImageByVector(vector_emprise_input, image_threshold_input, image_threshold_cut, None, None, no_data_value, epsg, format_raster, format_vector) :
                raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_threshold_input + ". Voir message d'erreur." + endC)
            createBinaryMask(image_threshold_cut, image_threshold_mask, threshold_bd_value, False, CODAGE_8B)

        # Execution de la fonction createMacroSamples pour une image correspondant au données routes
        if bd_road_vector_input_list != [] :
            createMacroSamples(image_mns_clean, vector_emprise_input, vector_bd_road_temp, raster_bd_road_mask, bd_road_vector_input_list, bd_road_buff_list, sql_road_expression_list, path_time_log, basename_mnh, simplify_vector_param, format_vector, extension_vector, save_results_intermediate, overwrite)

        if debug >= 3:
            print(bold + green +  "\ncreateMnh() : " + endC + "File raster from BD road is create %s" %(raster_bd_road_mask) + endC)

        # CALCUL DU MNH

        # Calcul par bandMath du MNH definir l'expression qui soustrait le MNT au MNS en introduisant le biais et en mettant les valeurs à 0 à une valeur approcher de 0.0000001
        delta = ""
        if height_bias > 0 :
            delta = "+%s" %(str(height_bias))
        elif height_bias < 0 :
            delta = "-%s" %(str(abs(height_bias)))
        else :
            delta = ""

        # Definition de l'expression
        if bd_road_vector_input_list != [] :
            if image_threshold_input != "" :
                expression = "\"im3b1 > 0 and im4b1 > 0?%s:(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(str(PRECISION), delta, delta, str(PRECISION))
                command = "otbcli_BandMath -il %s %s %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, raster_bd_road_mask, image_threshold_mask, image_mnh_tmp, CODAGE_F, expression)
            else :
                expression = "\"im3b1 > 0?%s:(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(str(PRECISION), delta, delta, str(PRECISION))
                command = "otbcli_BandMath -il %s %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, raster_bd_road_mask, image_mnh_tmp, CODAGE_F, expression)
        else :
            expression = "\"(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(delta, delta, str(PRECISION))
            command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, image_mnh_tmp, CODAGE_F, expression)

        if ram_otb > 0:
            command += " -ram %d" %(ram_otb)

        if debug >= 3:
            print(cyan + "createMnh() : " + bold + green + "Calcul du MNH  %s difference du MNS : %s par le MNT :%s" %(image_mnh_tmp, image_mns_clean, image_mnt_clean_sample) + endC)
            print(command)

        exitCode = os.system(command)
        if exitCode != 0:
            print(command)
            raise NameError(cyan + "createMnh() : " + bold + red + "An error occured during otbcli_BandMath command to compute MNH " + image_mnh_tmp + ". See error message above." + endC)

        # DECOUPAGE DU MNH

        if bd_build_vector_input_list == []:
            image_mnh_road = image_mnh_output

        if debug >= 3:
            print(bold + green +  "createMnh() : " + endC + "Decoupage selon l'emprise du fichier mnh %s " %(image_mnh_tmp) + endC)

        # Fonction de découpe du mnh
        if not cutImageByVector(vector_emprise_input, image_mnh_tmp, image_mnh_road, None, None, no_data_value, epsg, format_raster, format_vector) :
            raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_mns_input + ". Voir message d'erreur." + endC)

        if debug >= 3:
            print(bold + green +  "createMnh() : " + endC + "Decoupage du fichier mnh %s complet" %(image_mnh_road) + endC)

        # INCRUSTATION DANS LE MNH DES DONNEES VECTEURS BATIS

        # Si demander => liste de fichier vecteur bati passé en donnée d'entrée
        if bd_build_vector_input_list != []:

            # Découpage des vecteurs de bd bati exogenes avec l'emprise
            vectors_build_cut_list = []
            for vector_build_input in bd_build_vector_input_list :
                vector_name = os.path.splitext(os.path.basename(vector_build_input))[0]
                vector_build_cut = sub_repertory_vector_temp + os.sep + vector_name + SUFFIX_CUT + extension_vector
                vectors_build_cut_list.append(vector_build_cut)
            cutoutVectors(vector_emprise_input, bd_build_vector_input_list, vectors_build_cut_list, format_vector)

            # Fusion des vecteurs batis découpés
            fusionVectors (vectors_build_cut_list, vector_bd_bati_temp)

            # Croisement vecteur rasteur entre le vecteur fusion des batis et le MNH créé precedement
            statisticsVectorRaster(image_mnh_road, vector_bd_bati_temp, "", 1, False, False, True, ['PREC_PLANI','PREC_ALTI','ORIGIN_BAT','median','sum','std','unique','range'], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite)

            # Calcul de la colonne delta_H entre les hauteurs des batis et la hauteur moyenne du MNH sous le bati
            COLUMN_ID = "ID"
            COLUMN_H_BUILD = "HAUTEUR"
            COLUMN_H_BUILD_MIN = "Z_MIN"
            COLUMN_H_BUILD_MAX = "Z_MAX"
            COLUMN_H_MNH = "mean"
            COLUMN_H_MNH_MIN = "min"
            COLUMN_H_MNH_MAX = "max"
            COLUMN_H_DIFF = "H_diff"

            field_type = ogr.OFTReal
            field_value = 0.0
            field_width = 20
            field_precision = 2
            attribute_name_dico = {}
            attribute_name_dico[COLUMN_ID] = ogr.OFTString
            attribute_name_dico[COLUMN_H_BUILD] = ogr.OFTReal
            attribute_name_dico[COLUMN_H_MNH] = ogr.OFTReal

            # Ajouter la nouvelle colonne H_diff
            addNewFieldVector(vector_bd_bati_temp, COLUMN_H_DIFF, field_type, field_value, field_width, field_precision, format_vector)

            # Recuperer les valeur de hauteur du bati et du mnt dans le vecteur
            data_z_dico = getAttributeValues(vector_bd_bati_temp, None, None, attribute_name_dico, format_vector)

            # Calculer la difference des Hauteur bati et mnt
            field_new_values_dico = {}
            for index in range(len(data_z_dico[COLUMN_ID])) :
                index_polygon = data_z_dico[COLUMN_ID][index]
                delta_h = abs(data_z_dico[COLUMN_H_BUILD][index] - data_z_dico[COLUMN_H_MNH][index])
                field_new_values_dico[index_polygon] = {COLUMN_H_DIFF:delta_h}

            # Mettre à jour la colonne H_diff dans le vecteur
            setAttributeIndexValuesList(vector_bd_bati_temp, COLUMN_ID, field_new_values_dico, format_vector)

            # Suppression de tous les polygones bati dons la valeur du delat H est inferieur à threshold_delta_h
            column = "'%s, %s, %s, %s, %s, %s, %s, %s'"% (COLUMN_ID, COLUMN_H_BUILD, COLUMN_H_BUILD_MIN, COLUMN_H_BUILD_MAX, COLUMN_H_MNH, COLUMN_H_MNH_MIN, COLUMN_H_MNH_MAX, COLUMN_H_DIFF)
            expression = "%s > %s" % (COLUMN_H_DIFF, threshold_delta_h)
            filterSelectDataVector(vector_bd_bati_temp, vector_bd_bati, column, expression, overwrite, format_vector)

            # Attention!!!! PAUSE pour trie et verification des polygones bati nom deja present dans le MNH ou non
            if not automatic :
                print(bold + blue +  "Application MnhCreation => " + endC + "Vérification manuelle du vecteur bati %s pour ne concerver que les batis non présent dans le MNH courant %s" %(vector_bd_bati_temp, image_mnh_road) + endC)
                input(bold + red + "Appuyez sur entree pour continuer le programme..." + endC)

            # Creation du masque bati avec pour H la hauteur des batiments
            rasterizeVector(vector_bd_bati, raster_bd_bati, image_mnh_road, COLUMN_H_BUILD)

            # Fusion du mask des batis et du MNH temporaire
            expression = "\"im1b1 > 0.0?im1b1:im2b1\""
            command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" %(raster_bd_bati, image_mnh_road, image_mnh_output, CODAGE_F, expression)

            if ram_otb > 0:
                command += " -ram %d" %(ram_otb)

            if debug >= 3:
                print(cyan + "createMnh() : " + bold + green + "Amelioration du MNH  %s ajout des hauteurs des batis %s" %(image_mnh_road, raster_bd_bati) + endC)
                print(command)

            exitCode = os.system(command)
            if exitCode != 0:
                print(command)
                raise NameError(cyan + "createMnh() : " + bold + red + "An error occured during otbcli_BandMath command to compute MNH Final" + image_mnh_output + ". See error message above." + endC)

    # SUPPRESIONS FICHIERS INTERMEDIAIRES INUTILES

    # Suppression des fichiers intermédiaires
    if not save_results_intermediate :
        if bd_build_vector_input_list != []:
            removeFile(image_mnh_road)
        removeFile(image_threshold_cut)
        removeFile(image_threshold_mask)
        removeFile(raster_bd_bati)
        removeVectorFile(vector_bd_road_temp)
        removeVectorFile(vector_bd_bati_temp)
        removeVectorFile(vector_bd_bati) # A confirmer!!!
        removeFile(raster_bd_road_mask)
        removeFile(image_mnh_tmp)
        deleteDir(sub_repertory_raster_temp)
        deleteDir(sub_repertory_vector_temp)

    print(endC)
    print(bold + green + "## END : MNH CREATION" + endC)
    print(endC)

    # Mise à jour du Log
    ending_event = "createMnh() : MNH creation ending : "
    timeLine(path_time_log,ending_event)

    return
Exemple #7
0
def comparareClassificationToReferenceGrid(image_input,
                                           vector_cut_input,
                                           vector_sample_input,
                                           vector_grid_input,
                                           vector_grid_output,
                                           size_grid,
                                           field_value_verif,
                                           no_data_value,
                                           path_time_log,
                                           epsg=2154,
                                           format_raster='GTiff',
                                           format_vector="ESRI Shapefile",
                                           extension_raster=".tif",
                                           extension_vector=".shp",
                                           save_results_intermediate=False,
                                           overwrite=True):
    """Compare a classification raster to a reference sample vector, cell by
    cell over a regular grid, and write per-cell quality indicators into the
    output grid vector.

    Every class value other than *field_value_verif* ("build") is first merged
    into a single arbitrary "other" value (65535); a confusion matrix is then
    computed per grid cell and its derived indicators (quantity rates,
    reference/classified surfaces, kappa, accuracy) are stored as attributes
    of that cell.

    Parameters:
        image_input: classification raster to assess.
        vector_cut_input: optional study-zone vector; if empty or missing, a
            mask vector is derived from image_input instead.
        vector_sample_input: reference sample vector, forwarded to
            computeQualityIndiceRateQuantity() for each cell.
        vector_grid_input: optional existing grid vector; if empty or missing,
            a grid of size_grid cells is created over the study zone.
        vector_grid_output: output grid vector receiving the indicator fields.
        size_grid: cell size used when the grid has to be created.
        field_value_verif: class value under scrutiny; must be strictly lower
            than the internal fusion value 65535 (checked, exits otherwise).
        no_data_value: raster no-data value, excluded from the reallocation.
        path_time_log: log file updated at start and end.
        epsg: projection code used when creating the grid (default 2154).
        format_raster / format_vector: GDAL / OGR driver names.
        extension_raster / extension_vector: extensions for intermediate files.
        save_results_intermediate: keep intermediate files when True.
        overwrite: forwarded to the grid-creation and per-cell helpers.

    Returns:
        Average "build" quantity rate over the grid cells that produced a
        usable confusion matrix (0 if none did).
    """

    # Log the start event
    starting_event = "comparareClassificationToReferenceGrid() : starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green +
          "## START : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    if debug >= 2:
        print(
            bold + green +
            "comparareClassificationToReferenceGrid() : Variables dans la fonction"
            + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "image_input : " + str(image_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_grid_input : " + str(vector_grid_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_grid_output : " + str(vector_grid_output) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "size_grid : " + str(size_grid) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "field_value_verif : " + str(field_value_verif))
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "no_data_value : " + str(no_data_value))
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "epsg  : " + str(epsg) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "overwrite : " + str(overwrite) + endC)

    # STEP 0: PREPARE THE INTERMEDIATE FILES

    CODAGE = "uint16"
    SUFFIX_STUDY = '_study'
    SUFFIX_TEMP = '_temp'
    SUFFIX_FUSION = '_other_fusion'

    # Sentinel used when a quantity rate cannot be computed for a class
    NONE_VALUE_QUANTITY = -1.0
    # Arbitrary value all non-build classes are merged into (max of uint16)
    FIELD_VALUE_OTHER = 65535

    FIELD_NAME_ID = "id"
    FIELD_NAME_RATE_BUILD = "rate_build"
    FIELD_NAME_RATE_OTHER = "rate_other"
    FIELD_NAME_SREF_BUILD = "sref_build"
    FIELD_NAME_SCLA_BUILD = "scla_build"
    FIELD_NAME_SREF_OTHER = "sref_other"
    FIELD_NAME_SCLA_OTHER = "scla_other"
    FIELD_NAME_KAPPA = "kappa"
    FIELD_NAME_ACCURACY = "accuracy"

    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

    repertory_output = os.path.dirname(vector_grid_output)
    base_name = os.path.splitext(os.path.basename(vector_grid_output))[0]

    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    vector_grid_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    image_raster_other_fusion = repertory_output + os.sep + base_name + SUFFIX_FUSION + extension_raster

    # STEP 0: VERIFICATION

    # The class value under test must stay strictly below the arbitrary
    # "other" fusion value, otherwise the two classes would collide
    if field_value_verif >= FIELD_VALUE_OTHER:
        print(
            cyan + "comparareClassificationToReferenceGrid() : " + bold + red +
            "Attention de valeur de nomenclature à vérifier  : " +
            str(field_value_verif) +
            " doit être inferieur à la valeur de fusion des valeur autre arbitraire de : "
            + str(FIELD_VALUE_OTHER) + endC,
            file=sys.stderr)
        sys.exit(1)  #exit with an error code

    # STEP 1: DEFINE A STUDY-ZONE SHAPE

    # Use the provided cut vector when it exists; otherwise derive a mask
    # vector from the input image footprint
    if (not vector_cut_input is None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input
    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # STEP 2: UNIFY THE "OTHER" ZONE

    # Reallocate every classification value except the build class (and the
    # no-data value) to the single FIELD_VALUE_OTHER value
    change_reaff_value_list = []
    reaff_value_list = identifyPixelValues(image_input)
    if field_value_verif in reaff_value_list:
        reaff_value_list.remove(field_value_verif)
    if no_data_value in reaff_value_list:
        reaff_value_list.remove(no_data_value)
    for elem in reaff_value_list:
        change_reaff_value_list.append(FIELD_VALUE_OTHER)
    reallocateClassRaster(image_input, image_raster_other_fusion,
                          reaff_value_list, change_reaff_value_list)

    # STEP 3: CREATE THE GRID OVER THE STUDY ZONE

    # Attribute fields carried by the grid file
    attribute_dico = {
        FIELD_NAME_ID: ogr.OFTInteger,
        FIELD_NAME_RATE_BUILD: ogr.OFTReal,
        FIELD_NAME_RATE_OTHER: ogr.OFTReal,
        FIELD_NAME_SREF_BUILD: ogr.OFTReal,
        FIELD_NAME_SCLA_BUILD: ogr.OFTReal,
        FIELD_NAME_SREF_OTHER: ogr.OFTReal,
        FIELD_NAME_SCLA_OTHER: ogr.OFTReal,
        FIELD_NAME_KAPPA: ogr.OFTReal,
        FIELD_NAME_ACCURACY: ogr.OFTReal
    }
    nb_polygon = 0

    if (not vector_grid_input is None) and (vector_grid_input != "") and (
            os.path.isfile(vector_grid_input)):
        # Use the input grid file

        # Copy the input grid file to the output grid file
        copyVectorFile(vector_grid_input, vector_grid_output)

        # Add the indicator fields to the output grid file
        for field_name in attribute_dico:
            addNewFieldVector(vector_grid_output, field_name,
                              attribute_dico[field_name], None, None, None,
                              format_vector)

        # Fill the "id" field identifying each square of the grid
        nb_polygon = updateIndexVector(vector_grid_output, FIELD_NAME_ID,
                                       format_vector)

    else:
        # No grid file provided: create one with the size_grid value

        # Create the grid file
        nb_polygon = createGridVector(vector_study, vector_grid_temp,
                                      size_grid, size_grid, attribute_dico,
                                      overwrite, epsg, format_vector)

        # Clip the grid with the study-zone shape
        cutVectorAll(vector_study, vector_grid_temp, vector_grid_output,
                     format_vector)

    # STEP 4: COMPUTE THE QUALITY INDICATOR FOR EACH GRID CELL

    if debug >= 2:
        print(bold + "nb_polygon = " + endC + str(nb_polygon) + "\n")

    # For each existing polygon
    sum_rate_quantity_build = 0
    nb_rate_sum = 0
    size_area_pixel = abs(pixel_size_x * pixel_size_y)

    for id_polygon in range(nb_polygon):
        geom_list = getGeomPolygons(vector_grid_output, FIELD_NAME_ID,
                                    id_polygon, format_vector)
        if geom_list is not None and geom_list != []:  # and (id_polygon == 24 or id_polygon == 30):

            if debug >= 1:
                print(cyan + "comparareClassificationToReferenceGrid() : " +
                      bold + green +
                      "Calcul de la matrice pour le polygon n°: " +
                      str(id_polygon) + endC)

            geom = geom_list[0]
            class_ref_list, class_pro_list, rate_quantity_list, kappa, accuracy, matrix = computeQualityIndiceRateQuantity(
                image_raster_other_fusion, vector_sample_input,
                repertory_output, base_name + str(id_polygon), geom, size_grid,
                pixel_size_x, pixel_size_y, field_value_verif,
                FIELD_VALUE_OTHER, no_data_value, epsg, format_raster,
                format_vector, extension_raster, extension_vector, overwrite,
                save_results_intermediate)

            # If the quality indicators were computed successfully
            if debug >= 2:
                print(matrix)
            if matrix != None and matrix != [] and matrix[0] != []:

                # Retrieve the build quantity and compute the reference and
                # classified surfaces (full or partial square)
                if len(class_ref_list) == 2 and len(
                        class_pro_list
                ) == 2:  # Both build and other pixels present (in ref and prod)
                    rate_quantity_build = rate_quantity_list[0]
                    rate_quantity_other = rate_quantity_list[1]
                    size_area_ref_build = (matrix[0][0] +
                                           matrix[0][1]) * size_area_pixel
                    size_area_classif_build = (matrix[0][0] +
                                               matrix[1][0]) * size_area_pixel
                    size_area_ref_other = (matrix[1][0] +
                                           matrix[1][1]) * size_area_pixel
                    size_area_classif_other = (matrix[0][1] +
                                               matrix[1][1]) * size_area_pixel
                    sum_rate_quantity_build += rate_quantity_build
                    nb_rate_sum += 1

                else:  # Only build pixels OR only other pixels

                    if class_ref_list[
                            0] == field_value_verif:  # Only build reference pixels
                        rate_quantity_build = rate_quantity_list[0]
                        rate_quantity_other = NONE_VALUE_QUANTITY
                        size_area_ref_other = 0

                        if len(
                                class_pro_list
                        ) == 2:  # Both build and other produced pixels
                            size_area_ref_build = (
                                matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][
                                0] * size_area_pixel
                            size_area_classif_other = matrix[0][
                                1] * size_area_pixel

                        else:
                            size_area_ref_build = matrix[0][0] * size_area_pixel
                            if class_pro_list[
                                    0] == field_value_verif:  # Only build produced pixels
                                size_area_classif_build = matrix[0][
                                    0] * size_area_pixel
                                size_area_classif_other = 0

                            else:  # Only other produced pixels
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][
                                    0] * size_area_pixel

                    else:  # Only other reference pixels
                        rate_quantity_build = NONE_VALUE_QUANTITY
                        rate_quantity_other = rate_quantity_list[0]
                        size_area_ref_build = 0

                        if len(
                                class_pro_list
                        ) == 2:  # Both build and other produced pixels
                            size_area_ref_other = (
                                matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][
                                0] * size_area_pixel
                            size_area_classif_other = matrix[0][
                                1] * size_area_pixel

                        else:
                            size_area_ref_other = matrix[0][0] * size_area_pixel
                            if class_pro_list[
                                    0] == field_value_verif:  # Only build produced pixels
                                size_area_classif_build = matrix[0][
                                    0] * size_area_pixel
                                size_area_classif_other = 0

                            else:  # Only other produced pixels
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][
                                    0] * size_area_pixel

                # Update the indicator fields of this grid square
                setAttributeValues(
                    vector_grid_output, FIELD_NAME_ID, id_polygon, {
                        FIELD_NAME_RATE_BUILD: rate_quantity_build,
                        FIELD_NAME_RATE_OTHER: rate_quantity_other,
                        FIELD_NAME_SREF_BUILD: size_area_ref_build,
                        FIELD_NAME_SCLA_BUILD: size_area_classif_build,
                        FIELD_NAME_SREF_OTHER: size_area_ref_other,
                        FIELD_NAME_SCLA_OTHER: size_area_classif_other,
                        FIELD_NAME_KAPPA: kappa,
                        FIELD_NAME_ACCURACY: accuracy
                    }, format_vector)

    # Compute the average build quantity rate over the usable cells
    if nb_rate_sum != 0:
        average_quantity_build = sum_rate_quantity_build / nb_rate_sum
    else:
        average_quantity_build = 0
    if debug >= 2:
        print(bold + "nb_polygon_used = " + endC + str(nb_rate_sum))
        print(bold + "average_quantity_build = " + endC +
              str(average_quantity_build) + "\n")

    # STEP 5: REMOVE USELESS INTERMEDIATE FILES

    # Remove the intermediate data
    if not save_results_intermediate:

        if not cutting_action:
            if os.path.isfile(vector_study):
                removeVectorFile(vector_study)

        if os.path.isfile(image_raster_other_fusion):
            removeFile(image_raster_other_fusion)

        if os.path.isfile(vector_grid_temp):
            removeVectorFile(vector_grid_temp)

    print(endC)
    print(bold + green +
          "## END : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    # Log the end event
    ending_event = "comparareClassificationToReferenceGrid() :  ending : "
    timeLine(path_time_log, ending_event)

    return average_quantity_build
Exemple #8
0
def computeQualityIndiceRateQuantity(raster_input,
                                     vector_sample_input,
                                     repertory_output,
                                     base_name,
                                     geom,
                                     size_grid,
                                     pixel_size_x,
                                     pixel_size_y,
                                     field_value_verif,
                                     field_value_other,
                                     no_data_value,
                                     epsg,
                                     format_raster,
                                     format_vector,
                                     extension_raster,
                                     extension_vector,
                                     overwrite=True,
                                     save_results_intermediate=False):
    """Compute local quality indicators of a classification raster against a
    reference sample vector, restricted to a single cell geometry.

    The cell geometry is materialized as a local study shapefile; the
    reference vector is clipped to it (build inside, other as complement),
    the raster is clipped too, then a confusion matrix and its derived
    indicators are computed.

    Returns a 6-tuple (class_ref_list, class_pro_list, rate_quantity_list,
    kappa, overall_accuracy, matrix_origine); the defaults
    (None/None/None/0.0/0.0/None) are returned when the raster clipping or
    the matrix reading fails.
    """

    # Constants: extension and suffixes used to build local file names
    EXT_TXT = '.txt'
    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_BUILD = '_build'
    SUFFIX_OTHER = '_other'
    SUFFIX_LOCAL = '_local'
    SUFFIX_MATRIX = '_matrix'

    FIELD_NAME_CLASSIF = "classif"
    FIELD_TYPE = ogr.OFTInteger

    # Local working files, all sharing the same "<base_name>_local" stem
    local_stem = repertory_output + os.sep + base_name + SUFFIX_LOCAL
    vector_local_study = local_stem + SUFFIX_STUDY + extension_vector
    vector_local_cut_study = local_stem + SUFFIX_CUT + SUFFIX_STUDY + extension_vector
    vector_local_cut_build = local_stem + SUFFIX_CUT + SUFFIX_BUILD + extension_vector
    vector_local_cut_other = local_stem + SUFFIX_CUT + SUFFIX_OTHER + extension_vector
    vector_local_cut = local_stem + SUFFIX_CUT + extension_vector
    raster_local_cut = local_stem + SUFFIX_CUT + extension_raster
    matrix_local_file = local_stem + SUFFIX_CUT + SUFFIX_MATRIX + EXT_TXT

    # Default return values, used on early exit
    class_ref_list = None
    class_pro_list = None
    rate_quantity_list = None
    matrix_origine = None
    kappa = 0.0
    overall_accuracy = 0.0

    # Cleanup targets, kept in the historical removal order
    vectors_to_clean = [vector_local_study, vector_local_cut_study,
                        vector_local_cut, vector_local_cut_build,
                        vector_local_cut_other]
    files_to_clean = [raster_local_cut, matrix_local_file]

    # Remove any leftovers from a previous run
    for vector_path in vectors_to_clean:
        if os.path.isfile(vector_path):
            removeVectorFile(vector_path)
    for file_path in files_to_clean:
        if os.path.isfile(file_path):
            removeFile(file_path)

    # Materialize the cell geometry as a local working shapefile
    createPolygonsFromGeometryList({}, {"1": [geom, {}]},
                                   vector_local_study, epsg, format_vector)

    # Clip the reference vector to the local study zone: the "build" part
    # is the intersection, the "other" part is its complement in the cell
    cutVector(vector_local_study, vector_sample_input, vector_local_cut_build,
              format_vector)
    differenceVector(vector_local_cut_build, vector_local_study,
                     vector_local_cut_other, format_vector)

    # Tag each part with its class value, then merge them into one vector
    addNewFieldVector(vector_local_cut_build, FIELD_NAME_CLASSIF, FIELD_TYPE,
                      field_value_verif, None, None, format_vector)
    addNewFieldVector(vector_local_cut_other, FIELD_NAME_CLASSIF, FIELD_TYPE,
                      field_value_other, None, None, format_vector)
    fusionVectors([vector_local_cut_build, vector_local_cut_other],
                  vector_local_cut)

    # Clip the classification raster to the local study zone; on failure,
    # return the default (empty) indicators
    if not cutImageByVector(vector_local_study, raster_input, raster_local_cut,
                            pixel_size_x, pixel_size_y, no_data_value, 0,
                            format_raster, format_vector):
        return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine

    # Confusion matrix between the local raster and the local reference
    computeConfusionMatrix(raster_local_cut, vector_local_cut, "",
                           FIELD_NAME_CLASSIF, matrix_local_file, overwrite)

    # Read the matrix back and keep an untouched deep copy to return
    matrix, class_ref_list, class_pro_list = readConfusionMatrix(
        matrix_local_file)
    matrix_origine = copy.deepcopy(matrix)

    if matrix == []:
        print(
            cyan + "computeQualityIndiceRateQuantity() : " + bold + yellow +
            "!!! Une erreur c'est produite au cours de la lecture de la matrice de confusion : "
            + matrix_local_file + ". Voir message d'erreur." + endC)
        matrix_origine = None
        return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine

    # Matrix correction: when the control-sample microclasses and the
    # classification microclasses differ, pad the matrix accordingly
    class_missing_list = []
    if class_ref_list != class_pro_list:
        matrix, class_missing_list = correctMatrix(class_ref_list,
                                                   class_pro_list, matrix,
                                                   no_data_value)

    class_count = len(matrix[0]) - len(class_missing_list)

    # Quality indicators; only rate_quantity_list, overall_accuracy and
    # kappa are used by this function's return value
    precision_list, recall_list, fscore_list, performance_list, rate_false_positive_list, rate_false_negative_list, rate_quantity_list, class_list, overall_accuracy, overall_fscore, overall_performance, kappa = computeIndicators(
        class_count, matrix, class_ref_list, class_missing_list)

    # If a no-data column exists, drop it from the returned matrix copy
    if str(no_data_value) in class_pro_list:
        pos_col_nodata = class_pro_list.index(str(no_data_value))
        for line in matrix_origine:
            del line[pos_col_nodata]

        class_pro_list.remove(str(no_data_value))

    # Remove the local temporary files unless asked to keep them
    if not save_results_intermediate:
        for vector_path in vectors_to_clean:
            if os.path.isfile(vector_path):
                removeVectorFile(vector_path)
        for file_path in files_to_clean:
            if os.path.isfile(file_path):
                removeFile(file_path)

    return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine
Exemple #9
0
def computeRoughnessByOcsMnh( grid_input, grid_output, mnh_input, classif_input, class_build_list, epsg, no_data_value, path_time_log, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):
    """
    Compute the "Height of Roughness Elements" indicator per grid cell from a
    land-cover classification raster (OCS) and a height model raster (MNH).

    For each cell of the grid vector, the number of built pixels (surface) and
    the summed building height are extracted; the mean height is then derived
    as sum / area and written to a new field of the output grid.

    Parameters:
        grid_input: input grid vector file (one polygon per cell).
        grid_output: output grid vector file receiving the indicator fields.
        mnh_input: height model raster (MNH).
        classif_input: land-cover classification raster.
        class_build_list: list of class labels considered as "built".
        epsg: fallback EPSG code when the classification carries no projection.
        no_data_value: nodata value of the input rasters.
        path_time_log: log file updated at start and end of processing.
        format_raster / format_vector: GDAL / OGR driver names.
        extension_raster / extension_vector: extensions used for temp files.
        save_results_intermediate: keep temporary files when True.
        overwrite: recompute and overwrite an existing output when True.

    Returns:
        None. The result is written to grid_output.
    """

    # Constants: output field types and names
    FIELD_H_TYPE = ogr.OFTReal
    FIELD_ID_TYPE = ogr.OFTInteger
    FIELD_NAME_HSUM = "sum_h"
    FIELD_NAME_HRE = "mean_h"
    FIELD_NAME_AREA = "nb_area"
    FIELD_NAME_ID = "id"

    SUFFIX_HEIGHT = '_hauteur'
    SUFFIX_BUILT = '_bati'
    SUFFIX_TEMP = '_temp'
    SUFFIX_MASK = '_mask'

    # Update the log
    timeLine(path_time_log, "Début du calcul de l'indicateur Height of Roughness Elements par OCS et MNT starting : ")
    print(cyan + "computeRoughnessByOcsMnh() : " + endC + "Début du calcul de l'indicateur Height of Roughness Elements par OCS et MNT." + endC + "\n")

    if debug >= 3:
        print(bold + green + "computeRoughnessByOcsMnh() : Variables dans la fonction" + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "grid_input : " + str(grid_input) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "grid_output : " + str(grid_output) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "mnh_input : " + str(mnh_input) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "classif_input : " + str(classif_input) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "class_build_list : " + str(class_build_list) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        # format_raster / extensions were missing from the original dump;
        # added for consistency with the other functions of the file
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "computeRoughnessByOcsMnh() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Check whether the output vector already exists and must be overwritten
    check = os.path.isfile(grid_output)

    if check and not overwrite: # Output already exists and overwrite is disabled
        print(cyan + "computeRoughnessByOcsMnh() : " + bold + yellow + "Le calcul de Roughness par OCS et MNT a déjà eu lieu." + endC + "\n")
        print(cyan + "computeRoughnessByOcsMnh() : " + bold + yellow + "Grid vector output : " + grid_output + " already exists and will not be created again." + endC)
    else :
        if check:
            try:
                removeVectorFile(grid_output)
            except Exception:
                pass # if the file does not exist it cannot be removed: ignore this step

        ############################################
        ### General preparation of the processing ###
        ############################################

        # Retrieve the projection of the image; fall back to the epsg parameter
        epsg_proj = getProjectionImage(classif_input)
        if epsg_proj == 0:
            epsg_proj = epsg

        # Prepare the temporary files
        temp_path = os.path.dirname(grid_output) + os.sep + "RoughnessByOcsAndMnh"

        # Clean the temporary directory if it already exists
        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)
        os.makedirs(temp_path)

        basename = os.path.splitext(os.path.basename(grid_output))[0]
        built_height = temp_path + os.sep + basename + SUFFIX_HEIGHT + SUFFIX_BUILT + extension_raster
        built_height_temp = temp_path + os.sep + basename + SUFFIX_HEIGHT + SUFFIX_BUILT + SUFFIX_TEMP + extension_raster
        classif_built_mask = temp_path + os.sep + basename + SUFFIX_BUILT + SUFFIX_MASK + extension_raster
        grid_output_temp = temp_path + os.sep + basename + SUFFIX_TEMP + extension_vector

        ###################################
        ### Computation of the indicator ###
        ###################################

        # List of built classes taken into account in the BandMath expression
        expression_bati = ""
        for id_class in class_build_list :
            expression_bati += "im1b1==%s or " %(str(id_class))
        expression_bati = expression_bati[:-4]
        expression = "(%s) and (im2b1!=%s) and (im2b1>0)" %(expression_bati, str(no_data_value))

        # Create a binary built-up mask used for the built surface per cell
        command = "otbcli_BandMath -il %s %s -out %s uint8 -exp '%s ? 1 : 0'" %(classif_input, mnh_input, classif_built_mask, expression)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(cyan + "computeRoughnessByOcsMnh() : " + bold + red + "!!! Une erreur c'est produite au cours de la commande otbcli_BandMath : " + command + ". Voir message d'erreur." + endC, file=sys.stderr)
            # A bare `raise` outside an except block would itself fail with
            # "RuntimeError: No active exception to re-raise"; raise a named
            # error instead, consistent with the rest of the file
            raise NameError("otbcli_BandMath command failed: " + command)

        # Extract the building heights (MNH value on built pixels, 0 elsewhere)
        command = "otbcli_BandMath -il %s %s -out %s float -exp '%s ? im2b1 : 0'" %(classif_input, mnh_input, built_height_temp, expression)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(cyan + "computeRoughnessByOcsMnh() : " + bold + red + "!!! Une erreur c'est produite au cours de la commande otbcli_BandMath : " + command + ". Voir message d'erreur." + endC, file=sys.stderr)
            raise NameError("otbcli_BandMath command failed: " + command)

        # Assign the projection and the nodata value to the height raster
        command = "gdal_translate -a_srs EPSG:%s -a_nodata %s -of %s %s %s" %(str(epsg_proj), str(no_data_value), format_raster, built_height_temp, built_height)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(cyan + "computeRoughnessByOcsMnh() : " + bold + red + "!!! Une erreur c'est produite au cours de la comande : gdal_translate : " + command + ". Voir message d'erreur." + endC, file=sys.stderr)
            raise NameError("gdal_translate command failed: " + command)

        # Count of built pixels per cell, used as the built surface
        statisticsVectorRaster(classif_built_mask, grid_input, grid_output_temp, 1, False, False, True, ["min", "max", "median", "mean", "std", "unique", "range"], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite)

        # Rename the 'sum' field to FIELD_NAME_AREA
        renameFieldsVector(grid_output_temp, ['sum'], [FIELD_NAME_AREA], format_vector)

        # Summed building height per cell
        statisticsVectorRaster(built_height, grid_output_temp, grid_output, 1, False, False, True, ["min", "max", "median", 'mean', "std", "unique", "range"], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite)

        # Rename the 'sum' field to FIELD_NAME_HSUM
        # (original comment wrongly said 'mean' -> FIELD_NAME_HRE)
        renameFieldsVector(grid_output, ['sum'], [FIELD_NAME_HSUM], format_vector)

        # Compute FIELD_NAME_HRE as FIELD_NAME_HSUM divided by FIELD_NAME_AREA
        field_values_list = getAttributeValues(grid_output, None, None, {FIELD_NAME_ID:FIELD_ID_TYPE, FIELD_NAME_HSUM:FIELD_H_TYPE, FIELD_NAME_AREA:FIELD_H_TYPE}, format_vector)

        field_new_values_list = []
        for index in range(0, len(field_values_list[FIELD_NAME_ID])) :
            value_h = 0.0
            # Guard against empty cells (no built pixel -> area == 0)
            if field_values_list[FIELD_NAME_AREA][index] > 0 :
                value_h = field_values_list[FIELD_NAME_HSUM][index] / field_values_list[FIELD_NAME_AREA][index]
            field_new_values_list.append({FIELD_NAME_HRE:value_h})

        # Add the newly computed FIELD_NAME_HRE column
        addNewFieldVector(grid_output, FIELD_NAME_HRE, FIELD_H_TYPE, 0, None, None, format_vector)
        setAttributeValuesList(grid_output, field_new_values_list, format_vector)

        ##################################
        ### Cleanup of temporary files ###
        ##################################
        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)

    print(cyan + "computeRoughnessByOcsMnh() : " + endC + "Fin du calcul de l'indicateur Height of Roughness Elements par OCS et MNT." + endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Height of Roughness Elements par OCS et MNT  ending : ")

    return
# Exemple #10
# 0
def estimateQualityClassification(image_input,
                                  vector_cut_input,
                                  vector_sample_input,
                                  vector_output,
                                  nb_dot,
                                  no_data_value,
                                  column_name_vector,
                                  column_name_ref,
                                  column_name_class,
                                  path_time_log,
                                  epsg=2154,
                                  format_raster='GTiff',
                                  format_vector="ESRI Shapefile",
                                  extension_raster=".tif",
                                  extension_vector=".shp",
                                  save_results_intermediate=False,
                                  overwrite=True):
    """
    Create a vector file of control points carrying, for each point, the class
    value read from a classification raster (plus optionally a reference class
    read from an input sample vector), to estimate classification quality.

    Points are either drawn at random inside the image matrix (nb_dot points)
    or taken from an existing sample vector file (vector_sample_input).

    Parameters:
        image_input: classification raster to evaluate.
        vector_cut_input: optional cutting vector defining the study area;
            when empty/None a mask vector is generated from the image.
        vector_sample_input: optional existing point vector; when empty/None,
            nb_dot random points are drawn instead.
        vector_output: output point vector file.
        nb_dot: number of random points to draw (random mode only).
        no_data_value: nodata value used when cutting the raster.
        column_name_vector: reference column name in the input sample vector.
        column_name_ref: name of the reference-class field in the output.
        column_name_class: name of the raster-class field in the output.
        path_time_log: log file updated at start and end of processing.
        epsg: EPSG code used when creating the point file (default 2154).
        format_raster / format_vector: GDAL / OGR driver names.
        extension_raster / extension_vector: extensions used for temp files.
        save_results_intermediate: keep temporary files when True.
        overwrite: allow overwriting of existing outputs.

    Returns:
        None. The result is written to vector_output.
    """

    # Update the log
    starting_event = "estimateQualityClassification() : Masks creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green +
          "## START : CREATE PRINT POINTS FILE FROM CLASSIF IMAGE" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green +
              "estimateQualityClassification() : Variables dans la fonction" +
              endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "image_input : " + str(image_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_output : " + str(vector_output) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "nb_dot : " + str(nb_dot) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_vector : " + str(column_name_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_ref : " + str(column_name_ref) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_class : " + str(column_name_class) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "estimateQualityClassification() : " + endC + "epsg  : " +
              str(epsg) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "overwrite : " + str(overwrite) + endC)

    # STEP 0: PREPARE INTERMEDIATE FILE NAMES

    CODAGE = "uint16"

    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_TEMP = '_temp'
    SUFFIX_SAMPLE = '_sample'

    repertory_output = os.path.dirname(vector_output)
    base_name = os.path.splitext(os.path.basename(vector_output))[0]

    vector_output_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    raster_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_raster
    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    raster_cut = repertory_output + os.sep + base_name + SUFFIX_CUT + extension_raster
    vector_sample_temp = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + extension_vector

    # Resolve the field names used below.
    # NOTE(review): val_ref_col / val_class_col stay undefined when the
    # corresponding column name is empty or None, and their later use raises
    # NameError — confirm callers always provide column_name_ref and
    # column_name_class.
    input_ref_col = ""
    val_ref = 0
    if (column_name_vector != "") and (column_name_vector is not None):
        input_ref_col = column_name_vector
    if (column_name_ref != "") and (column_name_ref is not None):
        val_ref_col = column_name_ref
    if (column_name_class != "") and (column_name_class is not None):
        val_class_col = column_name_class

    # STEP 1: DEFINE A STUDY-AREA SHAPE

    if (vector_cut_input is not None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input

    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # STEP 2: CUT THE RASTER BY THE FOOTPRINT VECTOR IF NEEDED

    if cutting_action:
        # Identify the pixel sizes in x and y
        pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

        # Remove the output file if it already exists
        if os.path.exists(raster_cut):
            removeFile(raster_cut)

        # Cutting command
        if not cutImageByVector(vector_study, image_input, raster_cut,
                                pixel_size_x, pixel_size_y, no_data_value, 0,
                                format_raster, format_vector):
            raise NameError(
                cyan + "estimateQualityClassification() : " + bold + red +
                "Une erreur c'est produite au cours du decoupage de l'image : "
                + image_input + endC)
        if debug >= 2:
            print(cyan + "estimateQualityClassification() : " + bold + green +
                  "DECOUPAGE DU RASTER %s AVEC LE VECTEUR %s" %
                  (image_input, vector_study) + endC)
    else:
        raster_cut = image_input

    # STEP 3: BUILD THE POINT LIST WITH VALUES READ FROM THE RASTER

    # Geometry of the image
    cols, rows, bands = getGeometryImage(raster_cut)
    xmin, xmax, ymin, ymax = getEmpriseImage(raster_cut)
    pixel_width, pixel_height = getPixelWidthXYImage(raster_cut)

    if debug >= 2:
        print("cols : " + str(cols))
        print("rows : " + str(rows))
        print("bands : " + str(bands))
        print("xmin : " + str(xmin))
        print("ymin : " + str(ymin))
        print("xmax : " + str(xmax))
        print("ymax : " + str(ymax))
        print("pixel_width : " + str(pixel_width))
        print("pixel_height : " + str(pixel_height))

    # STEP 3-1: CREATE A POINT FILE BY RANDOM DRAW IN THE IMAGE MATRIX
    if (vector_sample_input is None) or (vector_sample_input == ""):
        is_sample_file = False

        # Image dimensions (total number of pixels)
        nb_pixels = abs(cols * rows)

        # Random draw of distinct pixel indices.
        # A set gives O(1) membership tests; the list keeps the draw order.
        # randint upper bound is nb_pixels - 1: index nb_pixels would be one
        # past the last pixel (off-by-one in the original code).
        drawn_dot_list = []
        drawn_dot_set = set()
        while len(drawn_dot_list) < nb_dot:
            val = random.randint(0, nb_pixels - 1)
            if val not in drawn_dot_set:
                drawn_dot_set.add(val)
                drawn_dot_list.append(val)

        # Dico indexed by draw number with pos_x, pos_y and pixel value
        points_random_value_dico = {}

        points_coordonnees_list = []
        for point in drawn_dot_list:
            pos_y = point // cols
            pos_x = point % cols
            coordonnees_list = [pos_x, pos_y]
            points_coordonnees_list.append(coordonnees_list)

        # Read the pixel values from the raster file
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_list)
        # Debug leftover in the original: guard the dump behind debug
        if debug >= 4:
            print(values_list)
        for idx_point in range(len(drawn_dot_list)):
            val_class = values_list[idx_point]
            coordonnees_list = points_coordonnees_list[idx_point]
            pos_x = coordonnees_list[0]
            pos_y = coordonnees_list[1]
            coor_x = xmin + (pos_x * abs(pixel_width))
            coor_y = ymax - (pos_y * abs(pixel_height))
            point_attr_dico = {
                "Ident": idx_point,
                val_ref_col: int(val_ref),
                val_class_col: int(val_class)
            }
            points_random_value_dico[idx_point] = [[coor_x, coor_y],
                                                   point_attr_dico]

            if debug >= 4:
                print("idx_point : " + str(idx_point))
                print("pos_x : " + str(pos_x))
                print("pos_y : " + str(pos_y))
                print("coor_x : " + str(coor_x))
                print("coor_y : " + str(coor_y))
                print("val_class : " + str(val_class))
                print("")

    # STEP 3-2: EXISTING POINT FILE, UPDATE WITH THE RASTER VALUES
    else:
        # The analysis point file exists
        is_sample_file = True
        cutVectorAll(vector_study, vector_sample_input, vector_sample_temp,
                     format_vector)
        if input_ref_col != "":
            points_coordinates_dico = readVectorFilePoints(
                vector_sample_temp, [input_ref_col], format_vector)
        else:
            points_coordinates_dico = readVectorFilePoints(
                vector_sample_temp, [], format_vector)

        # Build the dico
        points_random_value_dico = {}

        points_coordonnees_list = []
        for index_key in points_coordinates_dico:
            # Retrieve the coordinate values
            coord_info_list = points_coordinates_dico[index_key]
            coor_x = coord_info_list[0]
            coor_y = coord_info_list[1]
            pos_x = int(round((coor_x - xmin) / abs(pixel_width)))
            pos_y = int(round((ymax - coor_y) / abs(pixel_height)))
            coordonnees_list = [pos_x, pos_y]
            points_coordonnees_list.append(coordonnees_list)

        # Read the pixel values from the raster file
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_list)

        for index_key in points_coordinates_dico:
            # Retrieve the coordinate values
            coord_info_list = points_coordinates_dico[index_key]
            coor_x = coord_info_list[0]
            coor_y = coord_info_list[1]
            # Retrieve the reference class from the input vector
            if input_ref_col != "":
                label = coord_info_list[2]
                val_ref = label.get(input_ref_col)
            # Retrieve the class read from the input raster
            val_class = values_list[index_key]
            # Build the dico entry: point identifier, reference value,
            # value from the input raster
            point_attr_dico = {
                "Ident": index_key,
                val_ref_col: int(val_ref),
                val_class_col: int(val_class)
            }
            if debug >= 4:
                print("point_attr_dico: " + str(point_attr_dico))
            points_random_value_dico[index_key] = [[coor_x, coor_y],
                                                   point_attr_dico]

    # STEP 4: CREATE AND CUT THE RESULT VECTOR FILE BY THE STUDY SHAPE

    # Create the point file
    if is_sample_file and os.path.exists(vector_sample_temp):

        attribute_dico = {val_class_col: ogr.OFTInteger}
        # Copy the file
        removeVectorFile(vector_output_temp)
        copyVectorFile(vector_sample_temp, vector_output_temp)

        # Add the fields to the output file
        for field_name in attribute_dico:
            addNewFieldVector(vector_output_temp, field_name,
                              attribute_dico[field_name], 0, None, None,
                              format_vector)

        # Prepare the data (the reference field already exists in the copy)
        field_new_values_list = []
        for index_key in points_random_value_dico:
            point_attr_dico = points_random_value_dico[index_key][1]
            point_attr_dico.pop(val_ref_col, None)
            field_new_values_list.append(point_attr_dico)

        # Add the data
        setAttributeValuesList(vector_output_temp, field_new_values_list,
                               format_vector)

    else:
        # Define the attributes of the result file
        attribute_dico = {
            "Ident": ogr.OFTInteger,
            val_ref_col: ogr.OFTInteger,
            val_class_col: ogr.OFTInteger
        }

        createPointsFromCoordList(attribute_dico, points_random_value_dico,
                                  vector_output_temp, epsg, format_vector)

    # Cut the sample point file
    cutVectorAll(vector_study, vector_output_temp, vector_output,
                 format_vector)

    # STEP 5: REMOVE USELESS INTERMEDIATE FILES

    # Remove the intermediate data
    if not save_results_intermediate:
        if cutting_action:
            removeFile(raster_cut)
        else:
            removeVectorFile(vector_study)
            removeFile(raster_study)
        if is_sample_file:
            removeVectorFile(vector_sample_temp)
        removeVectorFile(vector_output_temp)

    print(endC)
    print(bold + green +
          "## END : CREATE PRINT POINTS FILE FROM CLASSIF IMAGE" + endC)
    print(endC)

    # Update the log
    ending_event = "estimateQualityClassification() : Masks creation ending : "
    timeLine(path_time_log, ending_event)

    return
# Exemple #11
# 0
def occupationIndicator(classif_input,
                        mnh_input,
                        vector_grid_input,
                        vector_grid_output,
                        class_label_dico,
                        epsg,
                        path_time_log,
                        format_raster='GTiff',
                        format_vector='ESRI Shapefile',
                        extension_raster=".tif",
                        extension_vector=".shp",
                        save_results_intermediate=False,
                        overwrite=True):

    # Mise à jour du Log
    starting_event = "occupationIndicator() : Calcul de l'indicateur occupation du sol/hauteur de végétation starting : "
    timeLine(path_time_log, starting_event)
    print(
        bold + green +
        "Début du calcul de l'indicateur 'occupation du sol/hauteur de végétation'."
        + endC + "\n")

    if debug >= 3:
        print(bold + green +
              "occupationIndicator() : Variables dans la fonction" + endC)
        print(cyan + "occupationIndicator() : " + endC + "classif_input : " +
              str(classif_input) + endC)
        print(cyan + "occupationIndicator() : " + endC + "mnh_input : " +
              str(mnh_input) + endC)
        print(cyan + "occupationIndicator() : " + endC +
              "vector_grid_input : " + str(vector_grid_input) + endC)
        print(cyan + "occupationIndicator() : " + endC +
              "vector_grid_output : " + str(vector_grid_output) + endC)
        print(cyan + "occupationIndicator() : " + endC +
              "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + "occupationIndicator() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "occupationIndicator() : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "occupationIndicator() : " + endC + "format_raster : " +
              str(format_raster) + endC)
        print(cyan + "occupationIndicator() : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "occupationIndicator() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "occupationIndicator() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "occupationIndicator() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "occupationIndicator() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Constante
    FIELD_OCS_NAME = 'class_OCS'
    FIELD_OCS_TYPE = ogr.OFTInteger
    FIELD_MAJORITY_NAME = 'majority'

    # Test si le vecteur de sortie existe déjà et si il doit être écrasés
    check = os.path.isfile(vector_grid_output)

    if check and not overwrite:  # Si le fichier de sortie existent deja et que overwrite n'est pas activé
        print(
            bold + yellow +
            "Le calcul de l'indicateur occupation du sol/hauteur de végétation a déjà eu lieu. \n"
            + endC)
        print(bold + yellow + "Grid vector output : " + vector_grid_output +
              " already exists and will not be created again." + endC)
    else:
        if check:
            try:
                removeVectorFile(vector_grid_output)
            except Exception:
                pass  # si le fichier n'existe pas, il ne peut pas être supprimé : cette étape est ignorée

        ############################################
        ### Préparation générale des traitements ###
        ############################################

        # Récuperation de la projection de l'image
        epsg_proj = getProjectionImage(classif_input)
        if epsg_proj == 0:
            epsg_proj = epsg

        # Liste des classes
        key_class_label_list = list(class_label_dico.keys())

        # Préparation des fichiers temporaires
        temp_path = os.path.dirname(vector_grid_output) + os.sep + "TEMP_OCS"

        # Nettoyage du repertoire temporaire si il existe
        if os.path.exists(temp_path):
            shutil.rmtree(temp_path)
        os.makedirs(temp_path)

        tempOCS = temp_path + os.sep + "occupation_du_sol" + extension_vector
        tempHveg = temp_path + os.sep + "occupation_du_sol_hauteur_de_vegetation" + extension_vector

        temp_class0 = temp_path + os.sep + "occupation_du_sol_hauteur_de_vegetation_class0" + extension_vector
        temp_class1 = temp_path + os.sep + "occupation_du_sol_hauteur_de_vegetation_class1" + extension_vector
        temp_class2 = temp_path + os.sep + "occupation_du_sol_hauteur_de_vegetation_class2" + extension_vector
        temp_class3 = temp_path + os.sep + "occupation_du_sol_hauteur_de_vegetation_class3" + extension_vector
        temp_class4 = temp_path + os.sep + "occupation_du_sol_hauteur_de_vegetation_class4" + extension_vector

        vegetation_height_temp = temp_path + os.sep + "hauteur_vegetation_temp" + extension_raster
        vegetation_height = temp_path + os.sep + "hauteur_vegetation" + extension_raster

        ##############################
        ### Calcul de l'indicateur ###
        ##############################

        # Récupération de l'occupation du sol de chaque maille
        statisticsVectorRaster(classif_input, vector_grid_input, tempOCS, 1,
                               True, True, False, [], [], class_label_dico,
                               path_time_log, True, format_vector,
                               save_results_intermediate, overwrite)

        # Récupération de la hauteur moyenne et la hauteur max de la végétation de chaque maille
        command = "otbcli_BandMath -il %s %s -out %s float -exp 'im1b1==%s ? im2b1 : 0'" % (
            classif_input, mnh_input, vegetation_height_temp,
            str(key_class_label_list[4]))
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(
                cyan + "occupationIndicator() : " + bold + red +
                "!!! Une erreur c'est produite au cours de la commande otbcli_BandMath : "
                + command + ". Voir message d'erreur." + endC,
                file=sys.stderr)
            raise

        command = "gdal_translate -a_srs EPSG:%s -a_nodata 0 -of %s %s %s" % (
            str(epsg_proj), format_raster, vegetation_height_temp,
            vegetation_height)
        exit_code = os.system(command)
        if exit_code != 0:
            print(command)
            print(
                cyan + "occupationIndicator() : " + bold + red +
                "!!! Une erreur c'est produite au cours de la comande : gdal_translate : "
                + command + ". Voir message d'erreur." + endC,
                file=sys.stderr)
            raise

        statisticsVectorRaster(vegetation_height, tempOCS, tempHveg, 1, False,
                               False, True, [], [], {}, path_time_log, True,
                               format_vector, save_results_intermediate,
                               overwrite)

        # Définir le nom des champs
        temp_class_list = []
        list_class_str = ""

        for class_str in key_class_label_list:
            list_class_str += "%s, " % (str(class_label_dico[class_str]))
        built_str = class_label_dico[key_class_label_list[0]]
        road_str = class_label_dico[key_class_label_list[1]]
        water_str = class_label_dico[key_class_label_list[2]]
        baresoil_str = class_label_dico[key_class_label_list[3]]
        # Shortcut for the vegetation class label (5th entry of the class label list)
        vegetation_str = class_label_dico[key_class_label_list[4]]
        # Field names for mean and max vegetation height, built from the first 3
        # characters of the label — presumably truncated to respect the
        # 10-character field-name limit of ESRI Shapefiles (TODO confirm)
        vegetation_height_medium_str = "H_moy_" + vegetation_str[0:3]
        vegetation_height_max_str = "H_max_" + vegetation_str[0:3]

        if debug >= 3:
            print("built_str = " + built_str)
            print("road_str = " + road_str)
            print("water_str = " + water_str)
            print("baresoil_str = " + baresoil_str)
            print("vegetation_str = " + vegetation_str)
            print("vegetation_height_medium_str = " +
                  vegetation_height_medium_str)
            print("vegetation_height_max_str = " + vegetation_height_max_str)

        # Column list kept by the SQL SELECT performed in each
        # filterSelectDataVector() call below.
        # NOTE(review): "%s%s" concatenates list_class_str directly with the
        # max-height field name — assumes list_class_str already ends with a
        # ", " separator; verify against where it is built.
        column = "'ID, majority, minority, %s%s, %s, %s'" % (
            list_class_str, vegetation_height_max_str,
            vegetation_height_medium_str, FIELD_OCS_NAME)

        # Add a field that will hold the land-cover (OCS) class assigned to each
        # polygon, and rename the zonal-statistics fields 'max'/'mean' to the
        # vegetation-height field names built above
        renameFieldsVector(tempHveg, ['max'], [vegetation_height_max_str],
                           format_vector)
        renameFieldsVector(tempHveg, ['mean'], [vegetation_height_medium_str],
                           format_vector)
        addNewFieldVector(tempHveg, FIELD_OCS_NAME, FIELD_OCS_TYPE, 0, None,
                          None, format_vector)

        # Assign class 0 => polygons whose majority class is built, road or water.
        # (Previous area-comparison formulation kept commented out for reference.)
        #expression = "(" + built_str + " >= " + baresoil_str + " AND " + built_str + " >= " + vegetation_str + ") OR (" + road_str + " >= " + baresoil_str + " AND " + road_str + " >= " + vegetation_str + ") OR (" + water_str + " >= " + baresoil_str + " AND " + water_str + " >= " + vegetation_str + ")"
        expression = "(" + FIELD_MAJORITY_NAME + " = '" + built_str + "') OR (" + FIELD_MAJORITY_NAME + " = '" + road_str + "') OR (" + FIELD_MAJORITY_NAME + " = '" + water_str + "')"
        if debug >= 3:
            print(expression)
        ret = filterSelectDataVector(tempHveg, temp_class0, column, expression,
                                     format_vector)
        if not ret:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)
        updateFieldVector(temp_class0, FIELD_OCS_NAME, 0, format_vector)
        temp_class_list.append(temp_class0)

        # Assign class 1 => polygons whose majority class is bare soil.
        # (Previous area-comparison formulation kept commented out for reference.)
        #expression = "(" + baresoil_str + " > " + built_str + " AND " + baresoil_str + " > " + road_str + " AND " + baresoil_str + " > " + water_str + " AND " + baresoil_str + " >= " + vegetation_str + ")"
        expression = "(" + FIELD_MAJORITY_NAME + " = '" + baresoil_str + "')"
        if debug >= 3:
            print(expression)
        ret = filterSelectDataVector(tempHveg, temp_class1, column, expression,
                                     format_vector)
        if not ret:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)
        updateFieldVector(temp_class1, FIELD_OCS_NAME, 1, format_vector)
        temp_class_list.append(temp_class1)

        # Assign class 2 => majority vegetation with mean height below 1 m.
        # (Previous area-comparison formulation kept commented out for reference.)
        #expression = "(" + vegetation_str + " > " + built_str + " AND " + vegetation_str + " > " + road_str + " AND " + vegetation_str + " > " + water_str + " AND " + vegetation_str + " > " + baresoil_str + ") AND (" + vegetation_height_medium_str + " < 1)"
        expression = "(" + FIELD_MAJORITY_NAME + " = '" + vegetation_str + "') AND (" + vegetation_height_medium_str + " < 1)"
        if debug >= 3:
            print(expression)
        ret = filterSelectDataVector(tempHveg, temp_class2, column, expression,
                                     format_vector)
        if not ret:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)
        updateFieldVector(temp_class2, FIELD_OCS_NAME, 2, format_vector)
        temp_class_list.append(temp_class2)

        # Assign class 3 => majority vegetation with mean height in [1, 5) m.
        # (Previous area-comparison formulation kept commented out for reference.)
        #expression = "(" + vegetation_str + " > " + built_str + " AND " + vegetation_str + " > " + road_str + " AND " + vegetation_str + " > " + water_str + " AND " + vegetation_str + " > " + baresoil_str + ") AND (" + vegetation_height_medium_str + " >= 1 AND " + vegetation_height_medium_str + " < 5)"
        expression = "(" + FIELD_MAJORITY_NAME + " = '" + vegetation_str + "') AND (" + vegetation_height_medium_str + " >= 1 AND " + vegetation_height_medium_str + " < 5)"
        if debug >= 3:
            print(expression)
        ret = filterSelectDataVector(tempHveg, temp_class3, column, expression,
                                     format_vector)
        if not ret:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)
        updateFieldVector(temp_class3, FIELD_OCS_NAME, 3, format_vector)
        temp_class_list.append(temp_class3)

        # Assign class 4 => majority vegetation with mean height of 5 m or more.
        # (Previous area-comparison formulation kept commented out for reference.)
        #expression = "(" + vegetation_str + " > " + built_str + " AND " + vegetation_str + " > " + road_str + " AND " + vegetation_str + " > " + water_str + " AND " + vegetation_str + " > " + baresoil_str + ") AND (" + vegetation_height_medium_str + " >= 5)"
        expression = "(" + FIELD_MAJORITY_NAME + " = '" + vegetation_str + "') AND (" + vegetation_height_medium_str + " >= 5)"
        if debug >= 3:
            print(expression)
        ret = filterSelectDataVector(tempHveg, temp_class4, column, expression,
                                     format_vector)
        if not ret:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte"
                % (expression) + endC)
        updateFieldVector(temp_class4, FIELD_OCS_NAME, 4, format_vector)
        temp_class_list.append(temp_class4)

        # Merge the five per-class vector files into the final output grid
        fusionVectors(temp_class_list, vector_grid_output, format_vector)

        ##########################################
        ### Cleanup of temporary files         ###
        ##########################################

        # Remove the whole temporary directory unless intermediate results
        # were requested
        if not save_results_intermediate:
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)

    # Update the log with the end-of-processing event
    # (runtime messages below are user-facing and intentionally left in French)
    print(
        bold + green +
        "Fin du calcul de l'indicateur 'occupation du sol/hauteur de végétation'."
        + endC + "\n")
    ending_event = "occupationIndicator() : Calcul de l'indicateur occupation du sol/hauteur de végétation ending : "
    timeLine(path_time_log, ending_event)

    return