def computeMajorityClass(input_grid, temp_directory, nodata_field, built_field,
                         mineral_field, baresoil_field, water_field,
                         vegetation_field, high_vegetation_field,
                         low_vegetation_field, maj_ocs_field, veg_mean_field,
                         class_label_dico_out, format_vector, extension_vector,
                         overwrite):

    SUFFIX_CLASS = '_class'
    FIELD_TYPE = ogr.OFTInteger
    FIELD_NAME_MAJORITY = 'majority'

    temp_class_list = []

    base_name = os.path.splitext(os.path.basename(input_grid))[0]
    temp_grid = temp_directory + os.sep + base_name + SUFFIX_CLASS + extension_vector
    temp_class0 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "0" + extension_vector
    temp_class1 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "1" + extension_vector
    temp_class2 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "2" + extension_vector
    temp_class3 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "3" + extension_vector
    temp_class4 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "4" + extension_vector

    ### Retrieve the majority class

    if debug >= 3:
        print(cyan + "computeMajorityClass() : " + endC + bold +
              "Récupération de la classe majoritaire." + endC + '\n')

    addNewFieldVector(input_grid,
                      maj_ocs_field,
                      FIELD_TYPE,
                      field_value=None,
                      field_width=None,
                      field_precision=None,
                      format_vector=format_vector)
    attr_names_list = getAttributeNameList(input_grid,
                                           format_vector=format_vector)
    attr_names_list_str = "'"
    for attr_name in attr_names_list:
        attr_names_list_str += attr_name + ', '
    attr_names_list_str = attr_names_list_str[:-2] + "'"

    expression = "%s = '%s' OR %s = '%s' OR %s = '%s' OR %s = '%s'" % (
        FIELD_NAME_MAJORITY, nodata_field, FIELD_NAME_MAJORITY, built_field,
        FIELD_NAME_MAJORITY, mineral_field, FIELD_NAME_MAJORITY, water_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class0,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class0,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_OTHERS_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class0)

    expression = "%s = '%s'" % (FIELD_NAME_MAJORITY, baresoil_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class1,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class1,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_BARESOIL_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class1)

    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s < 1)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class2,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class2,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_LOW_VEG_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class2)

    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 1 AND %s < 5)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field, veg_mean_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class3,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class3,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_MED_VEG_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class3)

    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 5)" % (
        FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY,
        low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field,
        veg_mean_field)
    ret = filterSelectDataVector(input_grid,
                                 temp_class4,
                                 attr_names_list_str,
                                 expression,
                                 overwrite=overwrite,
                                 format_vector=format_vector)
    updateFieldVector(temp_class4,
                      field_name=maj_ocs_field,
                      value=class_label_dico_out["MAJ_HIGH_VEG_CLASS"],
                      format_vector=format_vector)
    temp_class_list.append(temp_class4)

    fusionVectors(temp_class_list, temp_grid, format_vector=format_vector)
    removeVectorFile(input_grid, format_vector=format_vector)
    copyVectorFile(temp_grid, input_grid, format_vector=format_vector)

    return 0
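
# Hedged usage sketch (not part of the original module): computeMajorityClass()
# rewrites input_grid in place, deriving maj_ocs_field from the 'majority'
# column produced upstream by statisticsVectorRaster(). The paths and output
# label values below are hypothetical; only the dictionary keys are the ones
# actually read above.
#
# class_label_dico_out = {
#     "MAJ_OTHERS_CLASS": 10, "MAJ_BARESOIL_CLASS": 20,
#     "MAJ_LOW_VEG_CLASS": 21, "MAJ_MED_VEG_CLASS": 22,
#     "MAJ_HIGH_VEG_CLASS": 23,
# }
# computeMajorityClass("/tmp/out/grid.shp", "/tmp/out/grid", 'nodata', 'built',
#                      'mineral', 'baresoil', 'water', 'veget', 'high_veg',
#                      'low_veg', 'class_OCS', 'veg_h_mean',
#                      class_label_dico_out, 'ESRI Shapefile', '.shp', True)
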
def occupationIndicator(input_grid,
                        output_grid,
                        class_label_dico_out,
                        input_vector_classif,
                        field_classif_name,
                        input_soil_occupation,
                        input_height_model,
                        class_build_list,
                        class_road_list,
                        class_baresoil_list,
                        class_water_list,
                        class_vegetation_list,
                        class_high_vegetation_list,
                        class_low_vegetation_list,
                        epsg=2154,
                        no_data_value=0,
                        format_raster='GTiff',
                        format_vector='ESRI Shapefile',
                        extension_raster='.tif',
                        extension_vector='.shp',
                        path_time_log='',
                        save_results_intermediate=False,
                        overwrite=True):

    if debug >= 3:
        print(
            '\n' + bold + green +
            "Calcul d'indicateurs du taux de classes OCS - Variables dans la fonction :"
            + endC)
        print(cyan + "    occupationIndicator() : " + endC + "input_grid : " +
              str(input_grid) + endC)
        print(cyan + "    occupationIndicator() : " + endC + "output_grid : " +
              str(output_grid) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_label_dico_out : " + str(class_label_dico_out) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "input_vector_classif : " + str(input_vector_classif) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "field_classif_name : " + str(field_classif_name) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "input_soil_occupation : " + str(input_soil_occupation) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "input_height_model : " + str(input_height_model) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_build_list : " + str(class_build_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_road_list : " + str(class_road_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_baresoil_list : " + str(class_baresoil_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_water_list : " + str(class_water_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_vegetation_list : " + str(class_vegetation_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_high_vegetation_list : " +
              str(class_high_vegetation_list) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "class_low_vegetation_list : " + str(class_low_vegetation_list) +
              endC)
        print(cyan + "    occupationIndicator() : " + endC + "epsg : " +
              str(epsg) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "    occupationIndicator() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "    occupationIndicator() : " + endC + "overwrite : " +
              str(overwrite) + endC + '\n')

    # Constant definitions
    CODAGE_8BITS = 'uint8'
    CODAGE_FLOAT = 'float'
    NODATA_FIELD = 'nodata'

    PREFIX_S = 'S_'
    SUFFIX_TEMP = '_temp'
    SUFFIX_RASTER = '_raster'
    SUFFIX_HEIGHT = '_height'
    SUFFIX_VEGETATION = '_vegetation'

    VEG_MEAN_FIELD = 'veg_h_mean'
    VEG_MAX_FIELD = 'veg_h_max'
    VEG_RATE_FIELD = 'veg_h_rate'
    MAJ_OCS_FIELD = 'class_OCS'

    BUILT_FIELD, BUILT_LABEL = 'built', 1
    MINERAL_FIELD, MINERAL_LABEL = 'mineral', 2
    BARESOIL_FIELD, BARESOIL_LABEL = 'baresoil', 3
    WATER_FIELD, WATER_LABEL = 'water', 4
    VEGETATION_FIELD, VEGETATION_LABEL = 'veget', 5
    HIGH_VEGETATION_FIELD, HIGH_VEGETATION_LABEL = 'high_veg', 6
    LOW_VEGETATION_FIELD, LOW_VEGETATION_LABEL = 'low_veg', 7

    # Update the log
    starting_event = "occupationIndicator() : Début du traitement : "
    timeLine(path_time_log, starting_event)

    print(cyan + "occupationIndicator() : " + bold + green +
          "DEBUT DES TRAITEMENTS" + endC + '\n')

    # 'basename' variable definitions
    output_grid_basename = os.path.basename(os.path.splitext(output_grid)[0])
    output_grid_dirname = os.path.dirname(output_grid)
    soil_occupation_basename = os.path.basename(
        os.path.splitext(input_soil_occupation)[0])

    # Temporary path definitions
    temp_directory = output_grid_dirname + os.sep + output_grid_basename
    temp_grid = temp_directory + os.sep + output_grid_basename + SUFFIX_TEMP + extension_vector
    temp_soil_occupation = temp_directory + os.sep + soil_occupation_basename + SUFFIX_TEMP + SUFFIX_RASTER + extension_raster
    temp_height_vegetation = temp_directory + os.sep + output_grid_basename + SUFFIX_HEIGHT + SUFFIX_VEGETATION + extension_raster

    # Cleanup of previous processing
    if overwrite:
        if debug >= 3:
            print(cyan + "occupationIndicator() : " + endC +
                  "Nettoyage des traitements précédents." + endC + '\n')
        removeFile(output_grid)
        cleanTempData(temp_directory)
    else:
        if os.path.exists(output_grid):
            raise NameError(
                cyan + "occupationIndicator() : " + bold + yellow +
                "Le fichier de sortie existe déjà et ne sera pas regénéré." +
                endC + '\n')

    #############
    # Step 0/3 # Processing preparation
    #############

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 0/3 - Début de la préparation des traitements." + endC + '\n')

    # Rasterization of the classification (OCS) information if the input is in vector format
    if input_vector_classif != "":
        if debug >= 3:
            print(cyan + "occupationIndicator() : " + endC + bold +
                  "Rasterisation de l'OCS vecteur." + endC + '\n')
        reference_image = input_soil_occupation
        soil_occupation_vector_basename = os.path.basename(
            os.path.splitext(input_vector_classif)[0])
        input_soil_occupation = temp_directory + os.sep + soil_occupation_vector_basename + SUFFIX_RASTER + extension_raster
        command = "otbcli_Rasterization -in %s -out %s %s -im %s -background 0 -mode attribute -mode.attribute.field %s" % (
            input_vector_classif, input_soil_occupation, CODAGE_8BITS,
            reference_image, field_classif_name)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            raise NameError(
                cyan + "occupationIndicator() : " + bold + red +
                "Erreur lors de la rasterisation de l'OCS vecteur." + endC)

    # Analysis of the OCS raster layer
    class_other_list = identifyPixelValues(input_soil_occupation)
    no_data_ocs = getNodataValueImage(input_soil_occupation, 1)
    if no_data_ocs is not None:
        no_data_value = no_data_ocs

    # Assignment of new classification codes
    divide_vegetation_classes = False
    if class_high_vegetation_list != [] and class_low_vegetation_list != []:
        divide_vegetation_classes = True

    col_to_delete_list = [
        "minority", PREFIX_S + NODATA_FIELD, PREFIX_S + BUILT_FIELD,
        PREFIX_S + MINERAL_FIELD, PREFIX_S + BARESOIL_FIELD,
        PREFIX_S + WATER_FIELD
    ]
    class_label_dico = {
        int(no_data_value): NODATA_FIELD,
        int(BUILT_LABEL): BUILT_FIELD,
        int(MINERAL_LABEL): MINERAL_FIELD,
        int(BARESOIL_LABEL): BARESOIL_FIELD,
        int(WATER_LABEL): WATER_FIELD
    }
    if not divide_vegetation_classes:
        class_label_dico[int(VEGETATION_LABEL)] = VEGETATION_FIELD
        col_to_delete_list.append(PREFIX_S + VEGETATION_FIELD)
    else:
        class_label_dico[int(HIGH_VEGETATION_LABEL)] = HIGH_VEGETATION_FIELD
        class_label_dico[int(LOW_VEGETATION_LABEL)] = LOW_VEGETATION_FIELD
        col_to_delete_list.append(PREFIX_S + HIGH_VEGETATION_FIELD)
        col_to_delete_list.append(PREFIX_S + LOW_VEGETATION_FIELD)

    # Handling of class reallocation
    if debug >= 3:
        print(cyan + "occupationIndicator() : " + endC + bold +
              "Reaffectation du raster OCS." + endC + '\n')

    reaff_class_list = []
    macro_reaff_class_list = []

    for label in class_build_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(BUILT_LABEL)

    for label in class_road_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(MINERAL_LABEL)

    for label in class_baresoil_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(BARESOIL_LABEL)

    for label in class_water_list:
        if label in class_other_list:
            class_other_list.remove(label)
        reaff_class_list.append(label)
        macro_reaff_class_list.append(WATER_LABEL)

    if not divide_vegetation_classes:
        for label in class_vegetation_list:
            if label in class_other_list:
                class_other_list.remove(label)
            reaff_class_list.append(label)
            macro_reaff_class_list.append(VEGETATION_LABEL)
    else:
        for label in class_high_vegetation_list:
            if label in class_other_list:
                class_other_list.remove(label)
            reaff_class_list.append(label)
            macro_reaff_class_list.append(HIGH_VEGETATION_LABEL)
        for label in class_low_vegetation_list:
            if label in class_other_list:
                class_other_list.remove(label)
            reaff_class_list.append(label)
            macro_reaff_class_list.append(LOW_VEGETATION_LABEL)

    # Remaining pixel values not used by any class: reassigned to no-data
    for label in class_other_list:
        reaff_class_list.append(label)
        macro_reaff_class_list.append(no_data_value)

    reallocateClassRaster(input_soil_occupation, temp_soil_occupation,
                          reaff_class_list, macro_reaff_class_list,
                          CODAGE_8BITS)

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 0/3 - Fin de la préparation des traitements." + endC + '\n')

    #############
    # Step 1/3 # Computation of the OCS class rate indicators
    #############

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 1/3 - Début du calcul des indicateurs de taux de classes OCS." +
        endC + '\n')

    if debug >= 3:
        print(cyan + "occupationIndicator() : " + endC + bold +
              "Calcul des indicateurs de taux de classes OCS." + endC + '\n')

    statisticsVectorRaster(temp_soil_occupation, input_grid, temp_grid, 1,
                           True, True, False, col_to_delete_list, [],
                           class_label_dico, path_time_log, True,
                           format_vector, save_results_intermediate, overwrite)

    # Merge of the vegetation classes when high and low vegetation are separated (so the overall vegetation rate can be used in the decision tree)
    if divide_vegetation_classes:
        temp_grid_v2 = os.path.splitext(
            temp_grid)[0] + "_v2" + extension_vector
        sql_statement = "SELECT *, (%s + %s) AS %s FROM %s" % (
            HIGH_VEGETATION_FIELD, LOW_VEGETATION_FIELD, VEGETATION_FIELD,
            os.path.splitext(os.path.basename(temp_grid))[0])
        os.system("ogr2ogr -sql '%s' -dialect SQLITE %s %s" %
                  (sql_statement, temp_grid_v2, temp_grid))
        removeVectorFile(temp_grid, format_vector=format_vector)
        copyVectorFile(temp_grid_v2, temp_grid, format_vector=format_vector)

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 1/3 - Fin du calcul des indicateurs de taux de classes OCS." +
          endC + '\n')

    #############
    # Step 2/3 # Computation of the "vegetation height" indicator
    #############

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 2/3 - Début du calcul de l'indicateur de \"hauteur de végétation\"."
        + endC + '\n')

    computeVegetationHeight(
        temp_grid, output_grid, temp_soil_occupation, input_height_model,
        temp_height_vegetation, divide_vegetation_classes, VEGETATION_LABEL,
        HIGH_VEGETATION_LABEL, LOW_VEGETATION_LABEL, HIGH_VEGETATION_FIELD,
        LOW_VEGETATION_FIELD, VEG_MEAN_FIELD, VEG_MAX_FIELD, VEG_RATE_FIELD,
        CODAGE_FLOAT, SUFFIX_TEMP, no_data_value, format_vector, path_time_log,
        save_results_intermediate, overwrite)

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 2/3 - Fin du calcul de l'indicateur de \"hauteur de végétation\"."
        + endC + '\n')

    #############
    # Step 3/3 # Computation of the majority class indicator
    #############

    print(
        cyan + "occupationIndicator() : " + bold + green +
        "ETAPE 3/3 - Début du calcul de l'indicateur de classe majoritaire." +
        endC + '\n')

    if input_height_model != "":
        computeMajorityClass(output_grid, temp_directory, NODATA_FIELD,
                             BUILT_FIELD, MINERAL_FIELD, BARESOIL_FIELD,
                             WATER_FIELD, VEGETATION_FIELD,
                             HIGH_VEGETATION_FIELD, LOW_VEGETATION_FIELD,
                             MAJ_OCS_FIELD, VEG_MEAN_FIELD,
                             class_label_dico_out, format_vector,
                             extension_vector, overwrite)
    else:
        print(
            cyan + "occupationIndicator() : " + bold + yellow +
            "Pas de calcul de l'indicateur de classe majoritaire demandé (pas de MNH en entrée)."
            + endC + '\n')

    print(cyan + "occupationIndicator() : " + bold + green +
          "ETAPE 3/3 - Fin du calcul de l'indicateur de classe majoritaire." +
          endC + '\n')

    ####################################################################

    # Removal of temporary files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "occupationIndicator() : " + endC +
                  "Suppression des fichiers temporaires." + endC + '\n')
        deleteDir(temp_directory)

    print(cyan + "occupationIndicator() : " + bold + green +
          "FIN DES TRAITEMENTS" + endC + '\n')

    # Update the log
    ending_event = "occupationIndicator() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return 0
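
# Hedged usage sketch (not part of the original module): a minimal call that
# chains the three steps above (OCS class rates, vegetation height, majority
# class) on a grid. Every path and class label list below is hypothetical and
# must match the nomenclature of the input soil occupation raster.
#
# occupationIndicator("/tmp/grid.shp", "/tmp/grid_indicators.shp",
#                     {"MAJ_OTHERS_CLASS": 10, "MAJ_BARESOIL_CLASS": 20,
#                      "MAJ_LOW_VEG_CLASS": 21, "MAJ_MED_VEG_CLASS": 22,
#                      "MAJ_HIGH_VEG_CLASS": 23},
#                     input_vector_classif="", field_classif_name="",
#                     input_soil_occupation="/tmp/ocs.tif",
#                     input_height_model="/tmp/mnh.tif",
#                     class_build_list=[11100], class_road_list=[11200],
#                     class_baresoil_list=[13000], class_water_list=[12000],
#                     class_vegetation_list=[],
#                     class_high_vegetation_list=[21000],
#                     class_low_vegetation_list=[22000])
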
def statisticsVectorRaster(image_input,
                           vector_input,
                           vector_output,
                           band_number,
                           enable_stats_all_count,
                           enable_stats_columns_str,
                           enable_stats_columns_real,
                           col_to_delete_list,
                           col_to_add_list,
                           class_label_dico,
                           path_time_log,
                           clean_small_polygons=False,
                           format_vector='ESRI Shapefile',
                           save_results_intermediate=False,
                           overwrite=True):

    # INITIALIZATION
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + endC + "image_input : " +
              str(image_input) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "vector_input : " +
              str(vector_input) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "vector_output : " + str(vector_output) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "band_number : " +
              str(band_number) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "enable_stats_all_count : " + str(enable_stats_all_count) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "enable_stats_columns_str : " + str(enable_stats_columns_str) +
              endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "enable_stats_columns_real : " + str(enable_stats_columns_real) +
              endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "col_to_delete_list : " + str(col_to_delete_list) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "col_to_add_list : " + str(col_to_add_list) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "clean_small_polygons : " + str(clean_small_polygons) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "overwrite : " +
              str(overwrite) + endC)

    # Constants
    PREFIX_AREA_COLUMN = "S_"

    # Update the log
    starting_event = "statisticsVectorRaster() : Compute statistic crossing starting : "
    timeLine(path_time_log, starting_event)

    # Creation of the output vector file
    if vector_output == "":
        vector_output = vector_input  # Set only for display purposes
    else:
        # Copy the input vector to vector_output
        copyVectorFile(vector_input, vector_output, format_vector)

    # Checks
    image_xmin, image_xmax, image_ymin, image_ymax = getEmpriseImage(
        image_input)
    vector_xmin, vector_xmax, vector_ymin, vector_ymax = getEmpriseFile(
        vector_output, format_vector)
    extension_vector = os.path.splitext(vector_output)[1]

    if round(vector_xmin, 4) < round(image_xmin, 4) or round(
            vector_xmax, 4) > round(image_xmax, 4) or round(
                vector_ymin, 4) < round(image_ymin, 4) or round(
                    vector_ymax, 4) > round(image_ymax, 4):
        print(cyan + "statisticsVectorRaster() : " + bold + red +
              "image_xmin, image_xmax, image_ymin, image_ymax" + endC,
              image_xmin,
              image_xmax,
              image_ymin,
              image_ymax,
              file=sys.stderr)
        print(cyan + "statisticsVectorRaster() : " + bold + red +
              "vector_xmin, vector_xmax, vector_ymin, vector_ymax" + endC,
              vector_xmin,
              vector_xmax,
              vector_ymin,
              vector_ymax,
              file=sys.stderr)
        raise NameError(
            cyan + "statisticsVectorRaster() : " + bold + red +
            "The extent of the vector file (%s) is greater than that of the image file (%s)"
            % (vector_output, image_input) + endC)

    pixel_size = getPixelSizeImage(image_input)

    # Removal of very small polygons, which introduce NaN values
    if clean_small_polygons:
        min_size_area = pixel_size * 2
        vector_temp = os.path.splitext(
            vector_output)[0] + "_temp" + extension_vector

        cleanMiniAreaPolygons(vector_output, vector_temp, min_size_area, '',
                              format_vector)
        removeVectorFile(vector_output, format_vector)
        renameVectorFile(vector_temp, vector_output)

    # Get the driver for the shapefile format
    driver = ogr.GetDriverByName(format_vector)

    # Open the shapefile in read-write mode
    data_source = driver.Open(vector_output, 1)  # 0 means read-only - 1 means writeable
    if data_source is None:
        print(cyan + "statisticsVectorRaster() : " + bold + red +
              "Impossible d'ouvrir le fichier shape : " + vector_output + endC,
              file=sys.stderr)
        sys.exit(1)  # exit with an error code

    # Get the vector layer
    layer = data_source.GetLayer(0)  # Get the layer (a layer contains the polygons)
    layer_definition = layer.GetLayerDefn()  # GetLayerDefn => returns the field names of the user-defined (created) fields

    # STEP 1/4: AUTOMATIC CREATION OF THE VALUE DICTIONARY IF IT DOES NOT EXIST
    if enable_stats_all_count and class_label_dico == {}:
        image_values_list = identifyPixelValues(image_input)
        # For every value
        for id_value in image_values_list:
            class_label_dico[id_value] = str(id_value)
        # Removal of the no-data value at 0
        if 0 in class_label_dico:
            del class_label_dico[0]
    if debug >= 2:
        print(class_label_dico)

    # STEP 2/4: CREATION OF THE COLUMNS IN THE SHAPEFILE
    if debug >= 2:
        print(
            cyan + "statisticsVectorRaster() : " + bold + green +
            "ETAPE 1/3 : DEBUT DE LA CREATION DES COLONNES DANS LE FICHIER VECTEUR %s"
            % (vector_output) + endC)

    # Input:
    # col_to_add_list = [UniqueID, majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] - 'all' translating class_label_dico into as many columns
    # Sub-lists of col_to_add_list to identify, to ease later manipulations:
    # col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range]
    # col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S]
    # Construction of the intermediate lists
    col_to_add_inter01_list = []

    # Values to inject into columns - string format
    if enable_stats_columns_str:
        stats_columns_str_list = ['majority', 'minority']
        for e in stats_columns_str_list:
            col_to_add_list.append(e)

    # Values to inject into columns - numeric format
    if enable_stats_columns_real:
        stats_columns_real_list = [
            'min', 'max', 'mean', 'median', 'sum', 'std', 'unique', 'range'
        ]
        for e in stats_columns_real_list:
            col_to_add_list.append(e)

    # Values to inject into columns - numeric format
    if enable_stats_all_count:
        stats_all_count_list = ['all', 'count']
        for e in stats_all_count_list:
            col_to_add_list.append(e)

    # Values to inject into columns - if class_label_dico is not empty
    if class_label_dico != {}:
        stats_all_count_list = ['all', 'count']
        for e in stats_all_count_list:
            if e not in col_to_add_list:
                col_to_add_list.append(e)

    # Column-by-column addition (same fixed order as before)
    for col in ("majority", "DateMaj", "SrcMaj", "minority", "min", "max",
                "mean", "median", "sum", "std", "unique", "range"):
        if col in col_to_add_list:
            col_to_add_inter01_list.append(col)

    # Copy col_to_add_inter01_list into col_to_add_inter02_list
    col_to_add_inter02_list = list(col_to_add_inter01_list)

    for col in ("all", "count", "all_S", "count_S"):
        if col in col_to_add_list:
            col_to_add_inter02_list.append(col)
    if "DateMaj" in col_to_add_inter02_list:
        col_to_add_inter02_list.remove("DateMaj")
        col_to_add_inter02_list.insert(0, "majority")
    if "SrcMaj" in col_to_add_inter02_list:
        col_to_add_inter02_list.remove("SrcMaj")
        col_to_add_inter02_list.insert(0, "majority")

    # Values to inject into columns - numeric format
    if enable_stats_all_count:
        stats_all_count_list = ['all_S', 'count_S']
        for e in stats_all_count_list:
            col_to_add_list.append(e)

    # Creation of the unique identifier column
    if ("UniqueID" in col_to_add_list) or ("uniqueID" in col_to_add_list) or (
            "ID" in col_to_add_list):
        field_defn = ogr.FieldDefn("ID", ogr.OFTInteger)  # Creation of the field definition object
        layer.CreateField(field_defn)
        if debug >= 3:
            print(cyan + "statisticsVectorRaster() : " + endC +
                  "Creation de la colonne : ID")

    # Creation of the col_to_add_inter01_list columns ([majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range])
    for col in col_to_add_list:
        if layer_definition.GetFieldIndex(col) == -1:  # Check whether column col exists (-1 means it does not)
            if col in ('majority', 'DateMaj', 'SrcMaj', 'minority'):  # Columns filled with strings
                stat_classif_field_defn = ogr.FieldDefn(col, ogr.OFTString)  # Creation of the (string) field definition
                layer.CreateField(stat_classif_field_defn)
            elif col in ('mean', 'median', 'sum', 'std', 'unique', 'range', 'max', 'min'):
                stat_classif_field_defn = ogr.FieldDefn(col, ogr.OFTReal)  # Creation of the (real) field definition
                # Field width
                stat_classif_field_defn.SetWidth(20)
                # Floating-point precision of the field
                stat_classif_field_defn.SetPrecision(2)
                layer.CreateField(stat_classif_field_defn)
            if debug >= 3:
                print(cyan + "statisticsVectorRaster() : " + endC +
                      "Creation de la colonne : " + str(col))

    # Creation of the dictionary-related columns
    if ('all' in col_to_add_list) or ('count' in col_to_add_list) or (
            'all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
        for col in class_label_dico:

            # Handling of the column name corresponding to the class
            name_col = class_label_dico[col]
            if len(name_col) > 10:
                name_col = name_col[:10]
                print(
                    cyan + "statisticsVectorRaster() : " + bold + yellow +
                    "Nom de la colonne trop long. Il sera tronque a 10 caracteres en cas d'utilisation: "
                    + endC + name_col)

            # Handling of the column name corresponding to the class area
            name_col_area = PREFIX_AREA_COLUMN + name_col
            if len(name_col_area) > 10:
                name_col_area = name_col_area[:10]
                if debug >= 3:
                    print(
                        cyan + "statisticsVectorRaster() : " + bold + yellow +
                        "Nom de la colonne trop long. Il sera tronque a 10 caracteres en cas d'utilisation: "
                        + endC + name_col_area)

            # Addition of the raster element distribution percentage columns
            if ('all' in col_to_add_list) or ('count' in col_to_add_list):
                if layer_definition.GetFieldIndex(name_col) == -1:  # Check whether column name_col exists (-1 means it does not)
                    stat_classif_field_defn = ogr.FieldDefn(name_col, ogr.OFTReal)  # Creation of the (real) field definition
                    # Field width
                    stat_classif_field_defn.SetWidth(20)
                    # Floating-point precision of the field
                    stat_classif_field_defn.SetPrecision(2)
                    if debug >= 3:
                        print(cyan + "statisticsVectorRaster() : " + endC +
                              "Creation de la colonne : " + str(name_col))
                    layer.CreateField(stat_classif_field_defn)  # Add the field

            # Addition of the raster element area columns
            if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
                if layer_definition.GetFieldIndex(name_col_area) == -1:  # Check whether column name_col_area exists (-1 means it does not)
                    stat_classif_field_defn = ogr.FieldDefn(name_col_area, ogr.OFTReal)  # Creation of the (real) field definition
                    # Field width
                    stat_classif_field_defn.SetWidth(20)
                    # Floating-point precision of the field
                    stat_classif_field_defn.SetPrecision(2)

                    if debug >= 3:
                        print(cyan + "statisticsVectorRaster() : " + endC +
                              "Creation de la colonne : " + str(name_col_area))
                    layer.CreateField(stat_classif_field_defn)  # Add the field

    if debug >= 2:
        print(
            cyan + "statisticsVectorRaster() : " + bold + green +
            "ETAPE 1/3 : FIN DE LA CREATION DES COLONNES DANS LE FICHIER VECTEUR %s"
            % (vector_output) + endC)

    # STEP 3/4: FILLING OF THE VECTOR COLUMNS
    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "ETAPE 2/3 : DEBUT DU REMPLISSAGE DES COLONNES DU VECTEUR " +
              endC)

    # Computation of the col_to_add_inter02_list statistics ([majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S]) crossing the raster image with the vector
    # Uses the rasterstats library
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "Calcul des statistiques " + endC +
              "Stats : %s - Vecteur : %s - Raster : %s" %
              (col_to_add_inter02_list, vector_output, image_input) + endC)
    stats_info_list = raster_stats(vector_output,
                                   image_input,
                                   band_num=band_number,
                                   stats=col_to_add_inter02_list)

    # Count the number of polygons
    num_features = layer.GetFeatureCount()
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "Remplissage des colonnes polygone par polygone " + endC)
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + endC +
              "Nombre total de polygones : " + str(num_features))

    polygone_count = 0

    for polygone_stats in stats_info_list:  # For each polygon represented in stats_info_list - there are as many polygons as in the vector file

        # Feature extraction
        feature = layer.GetFeature(polygone_stats['__fid__'])

        polygone_count = polygone_count + 1

        if debug >= 3 and polygone_count % 10000 == 0:
            print(cyan + "statisticsVectorRaster() : " + endC +
                  "Avancement : %s polygones traites sur %s" %
                  (polygone_count, num_features))
        if debug >= 5:
            print(
                cyan + "statisticsVectorRaster() : " + endC +
                "Traitement du polygone : ",
                stats_info_list.index(polygone_stats) + 1)

        # Fill the unique identifier
        if ("UniqueID" in col_to_add_list) or (
                "uniqueID" in col_to_add_list) or ("ID" in col_to_add_list):
            feature.SetField('ID', int(stats_info_list.index(polygone_stats)))

        # Initialize to 0 the columns holding the class distribution percentage - check what happens if the name exceeds 10 characters
        if ('all' in col_to_add_list) or ('count' in col_to_add_list):
            for element in class_label_dico:
                name_col = class_label_dico[element]
                if len(name_col) > 10:
                    name_col = name_col[:10]
                feature.SetField(name_col, 0)

        # Initialize to 0 the columns holding the area corresponding to the class - check what happens if the name exceeds 10 characters
        if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
            for element in class_label_dico:
                name_col = class_label_dico[element]
                name_col_area = PREFIX_AREA_COLUMN + name_col
                if len(name_col_area) > 10:
                    name_col_area = name_col_area[:10]
                feature.SetField(name_col_area, 0)

        # Fill the columns holding the class distribution percentages and areas
        if ('all' in col_to_add_list) or ('count' in col_to_add_list) or (
                'all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
            # 'all' is a list of couples: (pixel_value_on_the_raster, number_of_pixels_with_that_value) for the observed polygon.
            # E.g. [(0,183),(803,45),(801,4)]: in the polygon, 183 pixels have value 0, 45 pixels have value 803 and 4 pixels have value 801
            majority_all = polygone_stats['all']

            # Two pixel values may refer to the same column. For example: pixels at 201, 202, 203 may all correspond to the BD Topo
            # Group the elements of majority_all that go into the same column with respect to class_label_dico
            count_for_idx_couple = 0  # Count of the modifications (couple removals) of majority_all, to adjust the index while iterating over it

            for idx_couple in range(1, len(majority_all)):  # No need to process the first element (idx_couple == 0)

                idx_couple = idx_couple - count_for_idx_couple  # Account for the removed couples while iterating over majority_all
                couple = majority_all[idx_couple]  # E.g. couple = (803,45)

                if (couple is None) or (couple == ""):  # In case of a rasterstats bug (e.g. geometric error on the polygon)
                    if debug >= 3:
                        print(
                            cyan + "statisticsVectorRaster() : " + bold + red +
                            "Probleme detecte dans la gestion du polygone %s" %
                            (polygone_count) + endC,
                            file=sys.stderr)
                    pass
                else:
                    for idx_verif in range(idx_couple):
                        # Check against the elements present earlier in majority_all
                        # Case where the name corresponding to the label has already been encountered in majority_all
                        # Check that the image pixel values are referenced in the dictionary
                        if couple[0] in class_label_dico:

                            if class_label_dico[couple[0]] == class_label_dico[
                                    majority_all[idx_verif][0]]:
                                majority_all[idx_verif] = (
                                    majority_all[idx_verif][0],
                                    majority_all[idx_verif][1] + couple[1]
                                )  # Add the corresponding pixel count to the earlier couple
                                majority_all.remove(couple)  # Remove the couple holding the "duplicate"
                                count_for_idx_couple = count_for_idx_couple + 1  # Update the modification count
                                break
                        else:
                            raise NameError(
                                cyan + "statisticsVectorRaster() : " + bold +
                                red +
                                "The image file (%s) contains pixel value '%d' not identified in class_label_dico"
                                % (image_input, couple[0]) + endC)

            # Integration of the majority_all values into the columns
            for couple_value_count in majority_all:  # Iterate over majority_all. E.g. couple_value_count = (803,45)
                if (couple_value_count is None) or (couple_value_count == ""):  # In case of a rasterstats bug (e.g. geometric error on the polygon)
                    if debug >= 3:
                        print(
                            cyan + "statisticsVectorRaster() : " + bold + red +
                            "Probleme detecte dans la gestion du polygone %s" %
                            (polygone_count) + endC,
                            file=sys.stderr)
                    pass
                else:
                    nb_pixel_total = polygone_stats['count']  # Number of pixels in the polygon
                    pixel_value = couple_value_count[0]  # Pixel value
                    value_count = couple_value_count[1]  # Number of pixels with that value
                    name_col = class_label_dico[pixel_value]  # Translation of the pixel value into its "meaning" with respect to the dictionary. E.g. BD Topo or 2011
                    name_col_area = PREFIX_AREA_COLUMN + name_col  # Name of the corresponding area column

                    if len(name_col) > 10:
                        name_col = name_col[:10]
                    if len(name_col_area) > 10:
                        name_col_area = name_col_area[:10]

                    value_area = pixel_size * value_count  # Area of the polygon part matching the pixel value
                    if nb_pixel_total is not None and nb_pixel_total != 0:
                        percentage = (
                            float(value_count) / float(nb_pixel_total)
                        ) * 100  # Conversion of the count into a percentage
                    else:
                        if debug >= 3:
                            print(
                                cyan + "statisticsVectorRaster() : " + bold +
                                red +
                                "Probleme dans l'identification du nombre de pixels du polygone %s : le pourcentage de %s est mis à 0"
                                % (polygone_count, name_col) + endC,
                                file=sys.stderr)
                        percentage = 0.0

                    if ('all' in col_to_add_list) or ('count' in col_to_add_list):
                        feature.SetField(name_col, percentage)  # Inject the percentage into the corresponding column
                    if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
                        feature.SetField(name_col_area, value_area)  # Inject the area into the corresponding column

        # Fill the requested statistics columns ( col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range] )
        for stats in col_to_add_inter01_list:

            if stats == 'DateMaj' or stats == 'SrcMaj':  # Special case of 'DateMaj' and 'SrcMaj': the column name is DateMaj or SrcMaj, but the statistic used is identified by 'majority'
                name_col = stats  # Column name. E.g. 'DateMaj'
                value_statis = polygone_stats['majority']  # Majority value. E.g. '203'
                if value_statis is None:
                    value_statis_class = 'nan'
                else:
                    value_statis_class = class_label_dico[value_statis]  # Translation of the value with respect to the dictionary. E.g. '2011'
                feature.SetField(name_col, value_statis_class)  # Add into the column

            elif (stats is None) or (stats == "") or (
                    polygone_stats[stats] is None) or (
                        polygone_stats[stats] == "") or (
                            polygone_stats[stats] == 'nan'):
                # In case of a rasterstats bug (e.g. geometric error on the polygon)
                pass

            else:
                name_col = stats  # Column name. E.g. 'majority', 'max'
                value_statis = polygone_stats[stats]  # Value to associate with the column, e.g. '2011'

                if (name_col == 'majority' or name_col == 'minority'
                        ) and class_label_dico != {}:  # Case where the column refers to a dictionary value
                    value_statis_class = class_label_dico[value_statis]
                else:
                    value_statis_class = value_statis

                feature.SetField(name_col, value_statis_class)

        layer.SetFeature(feature)
        feature.Destroy()

    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green +
              "ETAPE 2/3 : FIN DU REMPLISSAGE DES COLONNES DU VECTEUR %s" %
              (vector_output) + endC)

    # STEP 4/4: REMOVAL OF UNWANTED COLUMNS
    if col_to_delete_list != []:

        if debug >= 2:
            print(cyan + "statisticsVectorRaster() : " + bold + green +
                  "ETAPE 3/3 : DEBUT DES SUPPRESSIONS DES COLONNES %s" %
                  (col_to_delete_list) + endC)

        for col_to_delete in col_to_delete_list:

            if layer_definition.GetFieldIndex(col_to_delete) != -1:  # Check whether column col_to_delete exists (-1 means it does not)

                layer.DeleteField(layer_definition.GetFieldIndex(col_to_delete))  # Delete the column

                if debug >= 3:
                    print(cyan + "statisticsVectorRaster() : " + endC +
                          "Suppression de %s" % (col_to_delete) + endC)

        if debug >= 2:
            print(cyan + "statisticsVectorRaster() : " + bold + green +
                  "ETAPE 3/3 : FIN DE LA SUPPRESSION DES COLONNES" + endC)

    else:
        print(cyan + "statisticsVectorRaster() : " + bold + yellow +
              "ETAPE 3/3 : AUCUNE SUPPRESSION DE COLONNE DEMANDEE" + endC)

    # Close the shapefile
    layer.SyncToDisk()
    layer = None
    data_source.Destroy()

    # Update the log
    ending_event = "statisticsVectorRaster() : Compute statistic crossing ending : "
    timeLine(path_time_log, ending_event)

    return
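
# Hedged illustration (not part of the original module): the heart of the
# crossing above is the rasterstats call. With the current rasterstats API the
# same per-polygon statistics can be sketched as below; the file names are
# hypothetical and zonal_stats() is assumed available in the installed
# rasterstats version (raster_stats() used above is its deprecated ancestor).
#
# from rasterstats import zonal_stats
# stats = zonal_stats("grid.shp", "ocs.tif", band=1, categorical=True,
#                     stats=["majority", "minority", "mean", "max", "count"])
# print(stats[0])  # e.g. {'majority': 803, 'count': 232, ..., 803: 45, 801: 4}
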
def computeVegetationHeight(
        input_grid, output_grid, soil_occupation, height_model,
        height_vegetation, divide_vegetation_classes, vegetation_label,
        high_vegetation_label, low_vegetation_label, high_vegetation_field,
        low_vegetation_field, veg_mean_field, veg_max_field, veg_rate_field,
        codage_float, suffix_temp, no_data_value, format_vector, path_time_log,
        save_results_intermediate, overwrite):

    temp_grid = os.path.splitext(
        input_grid)[0] + suffix_temp + os.path.splitext(input_grid)[1]

    if height_model != "":

        ### Retrieve the vegetation height

        if debug >= 3:
            print(cyan + "computeVegetationHeight() : " + endC + bold +
                  "Récupération de la hauteur de végétation." + endC + '\n')

        if not divide_vegetation_classes:
            expression = "im1b1 == %s ? im2b1 : %s" % (vegetation_label,
                                                       no_data_value)
        else:
            expression = "im1b1 == %s or im1b1 == %s ? im2b1 : %s" % (
                high_vegetation_label, low_vegetation_label, no_data_value)

        command = "otbcli_BandMath -il %s %s -out %s %s -exp '%s'" % (
            soil_occupation, height_model, height_vegetation, codage_float,
            expression)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            raise NameError(
                cyan + "computeVegetationHeight() : " + bold + red +
                "Erreur lors de la récupération de la hauteur de végétation." +
                endC)

        ### Retrieve the mean vegetation height

        if debug >= 3:
            print(cyan + "computeVegetationHeight() : " + endC + bold +
                  "Récupération de la hauteur moyenne de végétation." + endC +
                  '\n')

        col_to_delete_list = ["min", "median", "sum", "std", "unique", "range"]
        statisticsVectorRaster(height_vegetation, input_grid, temp_grid, 1,
                               False, False, True, col_to_delete_list, [], {},
                               path_time_log, True, format_vector,
                               save_results_intermediate, overwrite)

        renameFieldsVector(temp_grid, ['mean'], [veg_mean_field],
                           format_vector=format_vector)
        renameFieldsVector(temp_grid, ['max'], [veg_max_field],
                           format_vector=format_vector)

    else:
        print(
            cyan + "computeVegetationHeight() : " + bold + yellow +
            "Pas de calcul de l'indicateur 'hauteur moyenne de végétation' (pas de MNH en entrée)."
            + endC + '\n')
        copyVectorFile(input_grid, temp_grid, format_vector=format_vector)

    if divide_vegetation_classes:

        ### Retrieve the high vegetation rate

        if debug >= 3:
            print(cyan + "computeVegetationHeight() : " + endC + bold +
                  "Récupération du taux de végétation haute." + endC + '\n')

        sql_statement = "SELECT *, ((%s/(%s+%s))*100) AS %s FROM %s" % (
            high_vegetation_field, high_vegetation_field, low_vegetation_field,
            veg_rate_field, os.path.splitext(os.path.basename(temp_grid))[0])

        command = "ogr2ogr -sql '%s' -dialect SQLITE %s %s" % (
            sql_statement, output_grid, temp_grid)
        if debug >= 3:
            print(command)
        exit_code = os.system(command)
        if exit_code != 0:
            raise NameError(
                cyan + "computeVegetationHeight() : " + bold + red +
                "Erreur lors de la récupération du taux de végétation haute." +
                endC)

    else:
        print(
            cyan + "computeVegetationHeight() : " + bold + yellow +
            "Pas de calcul de l'indicateur 'taux de végétation haute' (pas de distinction végétation haute/basse dans l'OCS)."
            + endC + '\n')
        copyVectorFile(temp_grid, output_grid, format_vector=format_vector)

    return 0
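
# Hedged illustration (not part of the original module): the OTB BandMath
# expression above keeps the height-model pixel (im2b1) only where the OCS
# pixel (im1b1) carries a vegetation label. A standalone NumPy equivalent of
# that masking logic, on hypothetical arrays:
#
# import numpy as np
# ocs = np.array([[5, 1], [5, 4]])          # soil occupation labels
# mnh = np.array([[3.2, 9.9], [0.7, 0.0]])  # height model (metres)
# veg_height = np.where(ocs == 5, mnh, 0)   # 5 = VEGETATION_LABEL, 0 = no-data
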
def main(gui=False):

    # Definition of the possible arguments for the command-line call
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        prog="ClassReallocationVector",
        description="\
    Info : Automatic reallocation of class, from vector. \n\
    Objectif : Gerer la re affectation de classes en reaffectant sur les vecteurs d'apprentissage en micro-classe de la classification supervisee, \n\
    suppressions et reaffectations possibles. \n\
    Example : python ClassReallocationVector.py -v ../ImagesTestChaine/APTV_05/Micro/APTV_05_cleaned.shp \n\
                                                -t ../ImagesTestChaine/APTV_05/Micro/APTV_05_prop_tab.txt \n\
                                                -id id \n\
                                                -log ../ImagesTestChaine/APTV_05/fichierTestLog.txt"
    )

    # Parameters
    parser.add_argument('-v',
                        '--vector_input',
                        default="",
                        help="Vector input contain the validation sample",
                        type=str,
                        required=True)
    parser.add_argument('-t',
                        '--proposal_table_input',
                        default="",
                        help="Proposal table input to realocation micro class",
                        type=str,
                        required=True)
    parser.add_argument(
        '-o',
        '--vector_output',
        default="",
        help=
        "Re-allocated vector output. Warning: if empty, the input vector file is modified in place.",
        type=str,
        required=False)
    parser.add_argument('-id',
                        '--validation_id',
                        default="id",
                        help="Label to identify the class",
                        type=str,
                        required=False)
    parser.add_argument('-vef',
                        '--format_vector',
                        default="ESRI Shapefile",
                        help="Format of the output file.",
                        type=str,
                        required=False)
    parser.add_argument('-log',
                        '--path_time_log',
                        default="",
                        help="Name of log",
                        type=str,
                        required=False)
    parser.add_argument(
        '-sav',
        '--save_results_inter',
        action='store_true',
        default=False,
        help=
        "Save (do not delete) intermediate results after the process. By default, False",
        required=False)
    parser.add_argument(
        '-now',
        '--overwrite',
        action='store_false',
        default=True,
        help="Overwrite files with same names. By default, True",
        required=False)
    parser.add_argument(
        '-debug',
        '--debug',
        default=3,
        help="Option : Value of level debug trace, default : 3 ",
        type=int,
        required=False)
    args = displayIHM(gui, parser)

    # RECUPERATION DES ARGUMENTS

    # Récupération du fichier vecteur d'entrée
    if args.vector_input is not None:
        vector_input = args.vector_input
        if not os.path.isfile(vector_input):
            raise NameError(cyan + "ClassReallocationVector : " + bold + red +
                            "File %s does not exist!" % (vector_input) + endC)

    # Récupération de la table de proposition d'entrée
    if args.proposal_table_input is not None:
        proposal_table_input = args.proposal_table_input
        if not os.path.isfile(proposal_table_input):
            raise NameError(cyan + "ClassReallocationVector : " + bold + red +
                            "File %s does not exist!" %
                            (proposal_table_input) + endC)

    # Récupération du vecteur de sortie
    if args.vector_output is not None and args.vector_output != "":
        vector_output = args.vector_output
    else:
        vector_output = None

    # Champ de validation
    if args.validation_id is not None:
        validation_id_field = args.validation_id

    # Récupération du format du fichier de sortie
    if args.format_vector is not None:
        format_vector = args.format_vector

    # Récupération du nom du fichier log
    if args.path_time_log is not None:
        path_time_log = args.path_time_log

    if args.save_results_inter is not None:
        save_results_intermediate = args.save_results_inter

    if args.overwrite is not None:
        overwrite = args.overwrite

    # Récupération de l'option niveau de debug
    if args.debug is not None:
        global debug
        debug = args.debug

    # Affichage des arguments récupérés
    if debug >= 3:
        print(bold + green + "Variables dans le parser" + endC)
        print(cyan + "ClassReallocationVector : " + endC + "vector_input : " +
              str(vector_input) + endC)
        print(cyan + "ClassReallocationVector : " + endC +
              "proposal_table_input : " + str(proposal_table_input) + endC)
        print(cyan + "ClassReallocationRaster : " + endC + "vector_output : " +
              str(vector_output) + endC)
        print(cyan + "ClassReallocationVector : " + endC + "validation_id : " +
              str(validation_id_field) + endC)
        print(cyan + "ClassReallocationVector : " + endC + "format_vector : " +
              str(format_vector) + endC)
        print(cyan + "ClassReallocationVector : " + endC + "path_time_log : " +
              str(path_time_log) + endC)
        print(cyan + "ClassReallocationVector : " + endC +
              "save_results_inter : " + str(save_results_intermediate) + endC)
        print(cyan + "ClassReallocationVector : " + endC + "overwrite : " +
              str(overwrite) + endC)
        print(cyan + "ClassReallocationVector : " + endC + "debug : " +
              str(debug) + endC)

    # EXECUTION DE LA FONCTION
    vector_file = vector_input

    if vector_output != None:
        repertory_output = os.path.dirname(vector_output)
        if not os.path.isdir(repertory_output):
            os.makedirs(repertory_output)
        try:
            copyVectorFile(vector_input, vector_output, format_vector)
        except RuntimeError:
            raise NameError(cyan + "ClassReallocationVector() : " + bold +
                            red + "An error occured during copy file : " +
                            vector_input + " See error message above." + endC)
        vector_file = vector_output

    # reallocation
    reallocClassVector(vector_file, proposal_table_input, path_time_log,
                       validation_id_field, format_vector,
                       save_results_intermediate, overwrite)
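# --- Hedged aside (not part of the original script) ---------------------------
# The '-now' option above is easy to misread: action='store_false' with
# default=True means overwrite stays True unless '-now' is given. A minimal
# self-contained sketch of that argparse pattern, for illustration only:
def _demo_store_false_flag():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-now', '--overwrite', action='store_false',
                        default=True)
    assert parser.parse_args([]).overwrite is True      # no flag: overwrite
    assert parser.parse_args(['-now']).overwrite is False  # flag: keep files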
Code example #6
def comparareClassificationToReferenceGrid(image_input,
                                           vector_cut_input,
                                           vector_sample_input,
                                           vector_grid_input,
                                           vector_grid_output,
                                           size_grid,
                                           field_value_verif,
                                           no_data_value,
                                           path_time_log,
                                           epsg=2154,
                                           format_raster='GTiff',
                                           format_vector="ESRI Shapefile",
                                           extension_raster=".tif",
                                           extension_vector=".shp",
                                           save_results_intermediate=False,
                                           overwrite=True):

    # Mise à jour du Log
    starting_event = "comparareClassificationToReferenceGrid() : starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green +
          "## START : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    if debug >= 2:
        print(
            bold + green +
            "comparareClassificationToReferenceGrid() : Variables dans la fonction"
            + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "image_input : " + str(image_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_grid_input : " + str(vector_grid_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "vector_grid_output : " + str(vector_grid_output) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "size_grid : " + str(size_grid) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "field_value_verif : " + str(field_value_verif))
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "no_data_value : " + str(no_data_value))
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "epsg  : " + str(epsg) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC +
              "overwrite : " + str(overwrite) + endC)

    # ETAPE 0 : PREPARATION DES FICHIERS INTERMEDIAIRES

    CODAGE = "uint16"
    SUFFIX_STUDY = '_study'
    SUFFIX_TEMP = '_temp'
    SUFFIX_FUSION = '_other_fusion'

    NONE_VALUE_QUANTITY = -1.0
    FIELD_VALUE_OTHER = 65535

    FIELD_NAME_ID = "id"
    FIELD_NAME_RATE_BUILD = "rate_build"
    FIELD_NAME_RATE_OTHER = "rate_other"
    FIELD_NAME_SREF_BUILD = "sref_build"
    FIELD_NAME_SCLA_BUILD = "scla_build"
    FIELD_NAME_SREF_OTHER = "sref_other"
    FIELD_NAME_SCLA_OTHER = "scla_other"
    FIELD_NAME_KAPPA = "kappa"
    FIELD_NAME_ACCURACY = "accuracy"

    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

    repertory_output = os.path.dirname(vector_grid_output)
    base_name = os.path.splitext(os.path.basename(vector_grid_output))[0]

    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    vector_grid_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    image_raster_other_fusion = repertory_output + os.sep + base_name + SUFFIX_FUSION + extension_raster

    # ETAPE 0 : VERIFICATION

    # Vérification de la valeur de la nomenclature à vérifier
    if field_value_verif >= FIELD_VALUE_OTHER:
        print(
            cyan + "comparareClassificationToReferenceGrid() : " + bold + red +
            "Attention : la valeur de nomenclature à vérifier (" +
            str(field_value_verif) +
            ") doit être inférieure à la valeur arbitraire de fusion des autres valeurs : "
            + str(FIELD_VALUE_OTHER) + endC,
            file=sys.stderr)
        sys.exit(1)  # exit with an error code

    # ETAPE 1 : DEFINIR UN SHAPE ZONE D'ETUDE

    if (vector_cut_input is not None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input
    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # ETAPE 2 : UNIFORMISATION DE LA ZONE OTHER

    # Réallocation des valeurs de classification pour les valeurs autres que le bâti
    change_reaff_value_list = []
    reaff_value_list = identifyPixelValues(image_input)
    if field_value_verif in reaff_value_list:
        reaff_value_list.remove(field_value_verif)
    if no_data_value in reaff_value_list:
        reaff_value_list.remove(no_data_value)
    for elem in reaff_value_list:
        change_reaff_value_list.append(FIELD_VALUE_OTHER)
    reallocateClassRaster(image_input, image_raster_other_fusion,
                          reaff_value_list, change_reaff_value_list)

    # ETAPE 3 : CREATION DE LA GRILLE SUR LA ZONE D'ETUDE

    # Définir les attributs du fichier
    attribute_dico = {
        FIELD_NAME_ID: ogr.OFTInteger,
        FIELD_NAME_RATE_BUILD: ogr.OFTReal,
        FIELD_NAME_RATE_OTHER: ogr.OFTReal,
        FIELD_NAME_SREF_BUILD: ogr.OFTReal,
        FIELD_NAME_SCLA_BUILD: ogr.OFTReal,
        FIELD_NAME_SREF_OTHER: ogr.OFTReal,
        FIELD_NAME_SCLA_OTHER: ogr.OFTReal,
        FIELD_NAME_KAPPA: ogr.OFTReal,
        FIELD_NAME_ACCURACY: ogr.OFTReal
    }
    nb_polygon = 0

    if (vector_grid_input is not None) and (vector_grid_input != "") and (
            os.path.isfile(vector_grid_input)):
        # Utilisation du fichier grille d'entrée

        # Recopie du fichier grille d'entrée vers le fichier grille de sortie
        copyVectorFile(vector_grid_input, vector_grid_output)

        # Ajout des champs au fichier grille de sortie
        for field_name in attribute_dico:
            addNewFieldVector(vector_grid_output, field_name,
                              attribute_dico[field_name], None, None, None,
                              format_vector)

        # Mettre le champs "id" identifiant du carré de l'élément de la grille
        nb_polygon = updateIndexVector(vector_grid_output, FIELD_NAME_ID,
                                       format_vector)

    else:
        # S'il n'existe pas de fichier grille, on en crée un avec la valeur de size_grid

        # Créer le fichier grille
        nb_polygon = createGridVector(vector_study, vector_grid_temp,
                                      size_grid, size_grid, attribute_dico,
                                      overwrite, epsg, format_vector)

        # Découper la grille avec le shape zone d'étude
        cutVectorAll(vector_study, vector_grid_temp, vector_grid_output,
                     format_vector)

    # ETAPE 4 : CALCUL DE L'INDICATEUR DE QUALITE POUR CHAQUE CASE DE LA GRILLE

    if debug >= 2:
        print(bold + "nb_polygon = " + endC + str(nb_polygon) + "\n")

    # Pour chaque polygone existant
    sum_rate_quantity_build = 0
    nb_rate_sum = 0
    size_area_pixel = abs(pixel_size_x * pixel_size_y)

    for id_polygon in range(nb_polygon):
        geom_list = getGeomPolygons(vector_grid_output, FIELD_NAME_ID,
                                    id_polygon, format_vector)
        if geom_list is not None and geom_list != []:

            if debug >= 1:
                print(cyan + "comparareClassificationToReferenceGrid() : " +
                      bold + green +
                      "Calcul de la matrice pour le polygon n°: " +
                      str(id_polygon) + endC)

            geom = geom_list[0]
            class_ref_list, class_pro_list, rate_quantity_list, kappa, accuracy, matrix = computeQualityIndiceRateQuantity(
                image_raster_other_fusion, vector_sample_input,
                repertory_output, base_name + str(id_polygon), geom, size_grid,
                pixel_size_x, pixel_size_y, field_value_verif,
                FIELD_VALUE_OTHER, no_data_value, epsg, format_raster,
                format_vector, extension_raster, extension_vector, overwrite,
                save_results_intermediate)

            # Si les calculs d'indicateurs de qualité sont ok
            if debug >= 2:
                print(matrix)
            if matrix is not None and matrix != [] and matrix[0] != []:

                # Récupérer la quantité de bâti et calculer la surface de référence et la surface de classification (carreau entier ou pas)
                if len(class_ref_list) == 2 and len(
                        class_pro_list
                ) == 2:  # Cas ou l'on a des pixels de build et other (en ref et en prod)
                    rate_quantity_build = rate_quantity_list[0]
                    rate_quantity_other = rate_quantity_list[1]
                    size_area_ref_build = (matrix[0][0] +
                                           matrix[0][1]) * size_area_pixel
                    size_area_classif_build = (matrix[0][0] +
                                               matrix[1][0]) * size_area_pixel
                    size_area_ref_other = (matrix[1][0] +
                                           matrix[1][1]) * size_area_pixel
                    size_area_classif_other = (matrix[0][1] +
                                               matrix[1][1]) * size_area_pixel
                    sum_rate_quantity_build += rate_quantity_build
                    nb_rate_sum += 1

                else:  # Cas ou l'on a uniquement des pixels de build OU uniquement des pixels de other

                    if class_ref_list[
                            0] == field_value_verif:  # Cas ou l'on a uniquement des pixels references build
                        rate_quantity_build = rate_quantity_list[0]
                        rate_quantity_other = NONE_VALUE_QUANTITY
                        size_area_ref_other = 0

                        if len(
                                class_pro_list
                        ) == 2:  # Cas ou l'on a des pixels de prod build et other
                            size_area_ref_build = (
                                matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][
                                0] * size_area_pixel
                            size_area_classif_other = matrix[0][
                                1] * size_area_pixel

                        else:
                            size_area_ref_build = matrix[0][0] * size_area_pixel
                            if class_pro_list[
                                    0] == field_value_verif:  # Cas ou l'on a uniquement des pixels prod build
                                size_area_classif_build = matrix[0][
                                    0] * size_area_pixel
                                size_area_classif_other = 0

                            else:  # Cas ou l'on a uniquement des pixels prod other
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][
                                    0] * size_area_pixel

                    else:  # Cas ou l'on a uniquement des pixels references other
                        rate_quantity_build = NONE_VALUE_QUANTITY
                        rate_quantity_other = rate_quantity_list[0]
                        size_area_ref_build = 0

                        if len(
                                class_pro_list
                        ) == 2:  # Cas ou l'on a des pixels de prod build et other
                            size_area_ref_other = (
                                matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][
                                0] * size_area_pixel
                            size_area_classif_other = matrix[0][
                                1] * size_area_pixel

                        else:
                            size_area_ref_other = matrix[0][0] * size_area_pixel
                            if class_pro_list[
                                    0] == field_value_verif:  # Cas ou l'on a uniquement des pixels prod build
                                size_area_classif_build = matrix[0][
                                    0] * size_area_pixel
                                size_area_classif_other = 0

                            else:  # Cas ou l'on a uniquement des pixels prod other
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][
                                    0] * size_area_pixel

                # Mettre à jour ces éléments du carreau de la grille
                setAttributeValues(
                    vector_grid_output, FIELD_NAME_ID, id_polygon, {
                        FIELD_NAME_RATE_BUILD: rate_quantity_build,
                        FIELD_NAME_RATE_OTHER: rate_quantity_other,
                        FIELD_NAME_SREF_BUILD: size_area_ref_build,
                        FIELD_NAME_SCLA_BUILD: size_area_classif_build,
                        FIELD_NAME_SREF_OTHER: size_area_ref_other,
                        FIELD_NAME_SCLA_OTHER: size_area_classif_other,
                        FIELD_NAME_KAPPA: kappa,
                        FIELD_NAME_ACCURACY: accuracy
                    }, format_vector)

    # Calcul de la moyenne
    if nb_rate_sum != 0:
        average_quantity_build = sum_rate_quantity_build / nb_rate_sum
    else:
        average_quantity_build = 0
    if debug >= 2:
        print(bold + "nb_polygon_used = " + endC + str(nb_rate_sum))
        print(bold + "average_quantity_build = " + endC +
              str(average_quantity_build) + "\n")

    # ETAPE 5 : SUPPRESSION DES FICHIERS INTERMEDIAIRES INUTILES

    # Suppression des données intermédiaires
    if not save_results_intermediate:

        if not cutting_action:
            if os.path.isfile(vector_study):
                removeVectorFile(vector_study)

        if os.path.isfile(image_raster_other_fusion):
            removeFile(image_raster_other_fusion)

        if os.path.isfile(vector_grid_temp):
            removeVectorFile(vector_grid_temp)

    print(endC)
    print(bold + green +
          "## END : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    # Mise à jour du Log
    ending_event = "comparareClassificationToReferenceGrid() :  ending : "
    timeLine(path_time_log, ending_event)

    return average_quantity_build
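# --- Hedged aside (not part of the original chain) ---------------------------
# computeQualityIndiceRateQuantity() is defined outside this listing. For
# reference, with the 2x2 confusion matrix convention used above (rows =
# reference, columns = classification, matrix[0][0] = build/build), overall
# accuracy and Cohen's kappa can be derived as in this minimal sketch:
def _kappa_accuracy_2x2(matrix):
    total = float(sum(sum(row) for row in matrix))
    observed = (matrix[0][0] + matrix[1][1]) / total  # overall accuracy
    # Chance agreement: sum over classes of (row marginal * column marginal).
    expected = sum((matrix[i][0] + matrix[i][1]) *
                   (matrix[0][i] + matrix[1][i])
                   for i in range(2)) / (total * total)
    kappa = (observed - expected) / (1.0 - expected) if expected != 1.0 else 1.0
    return kappa, observed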
Code example #7
def estimateQualityClassification(image_input,
                                  vector_cut_input,
                                  vector_sample_input,
                                  vector_output,
                                  nb_dot,
                                  no_data_value,
                                  column_name_vector,
                                  column_name_ref,
                                  column_name_class,
                                  path_time_log,
                                  epsg=2154,
                                  format_raster='GTiff',
                                  format_vector="ESRI Shapefile",
                                  extension_raster=".tif",
                                  extension_vector=".shp",
                                  save_results_intermediate=False,
                                  overwrite=True):

    # Mise à jour du Log
    starting_event = "estimateQualityClassification() : Masks creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green +
          "## START : CREATE PRINT POINTS FILE FROM CLASSIF IMAGE" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green +
              "estimateQualityClassification() : Variables dans la fonction" +
              endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "image_input : " + str(image_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "vector_output : " + str(vector_output) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "nb_dot : " + str(nb_dot) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_vector : " + str(column_name_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_ref : " + str(column_name_ref) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "column_name_class : " + str(column_name_class) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "estimateQualityClassification() : " + endC + "epsg  : " +
              str(epsg) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "format_raster : " + str(format_raster) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "format_vector : " + str(format_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate) +
              endC)
        print(cyan + "estimateQualityClassification() : " + endC +
              "overwrite : " + str(overwrite) + endC)

    # ETAPE 0 : PREPARATION DES FICHIERS INTERMEDIAIRES

    CODAGE = "uint16"

    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_TEMP = '_temp'
    SUFFIX_SAMPLE = '_sample'

    repertory_output = os.path.dirname(vector_output)
    base_name = os.path.splitext(os.path.basename(vector_output))[0]

    vector_output_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    raster_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_raster
    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    raster_cut = repertory_output + os.sep + base_name + SUFFIX_CUT + extension_raster
    vector_sample_temp = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + extension_vector

    # Mise à jour des noms de champs
    input_ref_col = ""
    val_ref = 0
    if (column_name_vector != "") and (column_name_vector is not None):
        input_ref_col = column_name_vector
    if (column_name_ref != "") and (column_name_ref is not None):
        val_ref_col = column_name_ref
    if (column_name_class != "") and (column_name_class is not None):
        val_class_col = column_name_class

    # ETAPE 1 : DEFINIR UN SHAPE ZONE D'ETUDE

    if (vector_cut_input is not None) and (vector_cut_input != "") and (
            os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input

    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # ETAPE 2 : DECOUPAGE DU RASTER PAR LE VECTEUR D'EMPRISE SI BESOIN

    if cutting_action:
        # Identification de la taille des pixels en x et en y
        pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

        # Si le fichier de sortie existe deja le supprimer
        if os.path.exists(raster_cut):
            removeFile(raster_cut)

        # Commande de découpe
        if not cutImageByVector(vector_study, image_input, raster_cut,
                                pixel_size_x, pixel_size_y, no_data_value, 0,
                                format_raster, format_vector):
            raise NameError(
                cyan + "estimateQualityClassification() : " + bold + red +
                "Une erreur s'est produite au cours du découpage de l'image : "
                + image_input + endC)
        if debug >= 2:
            print(cyan + "estimateQualityClassification() : " + bold + green +
                  "DECOUPAGE DU RASTER %s AVEC LE VECTEUR %s" %
                  (image_input, vector_study) + endC)
    else:
        raster_cut = image_input

    # ETAPE 3 : CREATION DE LA LISTE DE POINTS AVEC DONNEES ISSUES D'UN FICHIER RASTER

    # Géométrie de l'image
    cols, rows, bands = getGeometryImage(raster_cut)
    xmin, xmax, ymin, ymax = getEmpriseImage(raster_cut)
    pixel_width, pixel_height = getPixelWidthXYImage(raster_cut)

    if debug >= 2:
        print("cols : " + str(cols))
        print("rows : " + str(rows))
        print("bands : " + str(bands))
        print("xmin : " + str(xmin))
        print("ymin : " + str(ymin))
        print("xmax : " + str(xmax))
        print("ymax : " + str(ymax))
        print("pixel_width : " + str(pixel_width))
        print("pixel_height : " + str(pixel_height))

    # ETAPE 3-1 : CAS CREATION D'UN FICHIER DE POINTS PAR TIRAGE ALEATOIRE DANS LA MATRICE IMAGE
    if (vector_sample_input is None) or (vector_sample_input == ""):
        is_sample_file = False

        # Les dimensions de l'image
        nb_pixels = abs(cols * rows)

        # Tirage aléatoire des points (indices de pixels distincts ;
        # randint a une borne supérieure incluse, d'où le nb_pixels - 1)
        drawn_dot_list = []
        while len(drawn_dot_list) < nb_dot:
            val = random.randint(0, nb_pixels - 1)
            if val not in drawn_dot_list:
                drawn_dot_list.append(val)

        # Création d'un dico : index du tirage et attributs pos_x, pos_y et valeur du pixel
        points_random_value_dico = {}

        points_coordonnees_list = []
        for point in drawn_dot_list:
            pos_y = point // cols
            pos_x = point % cols
            coordonnees_list = [pos_x, pos_y]
            points_coordonnees_list.append(coordonnees_list)

        # Lecture dans le fichier raster des valeurs
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_list)
        if debug >= 4:
            print(values_list)
        for idx_point in range(len(drawn_dot_list)):
            val_class = values_list[idx_point]
            coordonnees_list = points_coordonnees_list[idx_point]
            pos_x = coordonnees_list[0]
            pos_y = coordonnees_list[1]
            coor_x = xmin + (pos_x * abs(pixel_width))
            coor_y = ymax - (pos_y * abs(pixel_height))
            point_attr_dico = {
                "Ident": idx_point,
                val_ref_col: int(val_ref),
                val_class_col: int(val_class)
            }
            points_random_value_dico[idx_point] = [[coor_x, coor_y],
                                                   point_attr_dico]

            if debug >= 4:
                print("idx_point : " + str(idx_point))
                print("pos_x : " + str(pos_x))
                print("pos_y : " + str(pos_y))
                print("coor_x : " + str(coor_x))
                print("coor_y : " + str(coor_y))
                print("val_class : " + str(val_class))
                print("")

    # ETAPE 3-2 : CAS D'UN FICHIER DE POINTS DEJA EXISTANT, MISE A JOUR DE LA DONNEE ISSUE DU RASTER
    else:
        # Le fichier de points d'analyses existe
        is_sample_file = True
        cutVectorAll(vector_study, vector_sample_input, vector_sample_temp,
                     format_vector)
        if input_ref_col != "":
            points_coordinates_dico = readVectorFilePoints(
                vector_sample_temp, [input_ref_col], format_vector)
        else:
            points_coordinates_dico = readVectorFilePoints(
                vector_sample_temp, [], format_vector)

        # Création du dico
        points_random_value_dico = {}

        points_coordonnees_list = []
        for index_key in points_coordinates_dico:
            # Recuperer les valeurs des coordonnees
            coord_info_list = points_coordinates_dico[index_key]
            coor_x = coord_info_list[0]
            coor_y = coord_info_list[1]
            pos_x = int(round((coor_x - xmin) / abs(pixel_width)))
            pos_y = int(round((ymax - coor_y) / abs(pixel_height)))
            coordonnees_list = [pos_x, pos_y]
            points_coordonnees_list.append(coordonnees_list)

        # Lecture dans le fichier raster des valeurs
        values_list = getPixelsValueListImage(raster_cut,
                                              points_coordonnees_list)

        for index_key in points_coordinates_dico:
            # Récuperer les valeurs des coordonnees
            coord_info_list = points_coordinates_dico[index_key]
            coor_x = coord_info_list[0]
            coor_y = coord_info_list[1]
            # Récupérer la classe de référence dans le vecteur d'entrée
            if input_ref_col != "":
                label = coord_info_list[2]
                val_ref = label.get(input_ref_col)
            # Récupérer la classe issue du raster d'entrée
            val_class = values_list[index_key]
            # Création du dico contenant identifiant du point, valeur de référence, valeur du raster d'entrée
            point_attr_dico = {
                "Ident": index_key,
                val_ref_col: int(val_ref),
                val_class_col: int(val_class)
            }
            if debug >= 4:
                print("point_attr_dico: " + str(point_attr_dico))
            points_random_value_dico[index_key] = [[coor_x, coor_y],
                                                   point_attr_dico]

    # ETAPE 4 : CREATION ET DECOUPAGE DU FICHIER VECTEUR RESULTAT PAR LE SHAPE D'ETUDE

    # Creer le fichier de points
    if is_sample_file and os.path.exists(vector_sample_temp):

        attribute_dico = {val_class_col: ogr.OFTInteger}
        # Recopie du fichier
        removeVectorFile(vector_output_temp)
        copyVectorFile(vector_sample_temp, vector_output_temp)

        # Ajout des champs au fichier de sortie
        for field_name in attribute_dico:
            addNewFieldVector(vector_output_temp, field_name,
                              attribute_dico[field_name], 0, None, None,
                              format_vector)

        # Préparation des donnees
        field_new_values_list = []
        for index_key in points_random_value_dico:
            point_attr_dico = points_random_value_dico[index_key][1]
            point_attr_dico.pop(val_ref_col, None)
            field_new_values_list.append(point_attr_dico)

        # Ajout des donnees
        setAttributeValuesList(vector_output_temp, field_new_values_list,
                               format_vector)

    else:
        # Définir les attributs du fichier résultat
        attribute_dico = {
            "Ident": ogr.OFTInteger,
            val_ref_col: ogr.OFTInteger,
            val_class_col: ogr.OFTInteger
        }

        createPointsFromCoordList(attribute_dico, points_random_value_dico,
                                  vector_output_temp, epsg, format_vector)

    # Découpage du fichier de points d'echantillons
    cutVectorAll(vector_study, vector_output_temp, vector_output,
                 format_vector)

    # ETAPE 5 : SUPPRESSION DES FICHIERS INTERMEDIAIRES INUTILES

    # Suppression des données intermédiaires
    if not save_results_intermediate:
        if cutting_action:
            removeFile(raster_cut)
        else:
            removeVectorFile(vector_study)
            removeFile(raster_study)
        if is_sample_file:
            removeVectorFile(vector_sample_temp)
        removeVectorFile(vector_output_temp)

    print(endC)
    print(bold + green +
          "## END : CREATE PRINT POINTS FILE FROM CLASSIF IMAGE" + endC)
    print(endC)

    # Mise à jour du Log
    ending_event = "estimateQualityClassification() : Masks creation ending : "
    timeLine(path_time_log, ending_event)

    return
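# --- Hedged aside (not part of the original chain) ---------------------------
# The rejection loop above (random.randint() plus a membership test on a list)
# gets slow as nb_dot approaches the pixel count. random.sample() draws
# distinct indices directly; this sketch reuses the index -> (col, row) -> map
# coordinate mapping of the original code and is hypothetical, not part of
# the library:
import random

def _draw_random_pixel_points(cols, rows, nb_dot, xmin, ymax,
                              pixel_width, pixel_height):
    # nb_dot distinct pixel indices in [0, cols*rows - 1], without replacement.
    for point in random.sample(range(cols * rows), nb_dot):
        pos_y, pos_x = divmod(point, cols)  # point = pos_y * cols + pos_x
        coor_x = xmin + pos_x * abs(pixel_width)
        coor_y = ymax - pos_y * abs(pixel_height)
        yield pos_x, pos_y, coor_x, coor_y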
def createEmprise(input_dir,
                  output_file,
                  is_not_assembled,
                  is_all_polygons_used,
                  is_not_date,
                  is_optimize_emprise,
                  is_optimize_emprise_nodata,
                  no_data_value,
                  size_erode,
                  path_time_log,
                  separ_name="_",
                  pos_date=1,
                  nb_char_date=8,
                  separ_date="",
                  epsg=2154,
                  format_vector='ESRI Shapefile',
                  extension_raster=".tif",
                  extension_vector=".shp",
                  save_results_intermediate=False,
                  overwrite=True):

    # Affichage des paramètres
    if debug >= 3:
        print(bold + green +
              "Variables dans le createEmprise - Variables générales" + endC)
        print(cyan + "createEmprise() : " + endC + "input_dir : " +
              str(input_dir))
        print(cyan + "createEmprise() : " + endC + "output_file : " +
              str(output_file))
        print(cyan + "createEmprise() : " + endC + "is_not_assembled : " +
              str(is_not_assembled))
        print(cyan + "createEmprise() : " + endC + "is_all_polygons_used : " +
              str(is_all_polygons_used))
        print(cyan + "createEmprise() : " + endC + "is_not_date : " +
              str(is_not_date))
        print(cyan + "createEmprise() : " + endC + "is_optimize_emprise : " +
              str(is_optimize_emprise))
        print(cyan + "createEmprise() : " + endC +
              "is_optimize_emprise_nodata : " +
              str(is_optimize_emprise_nodata))
        print(cyan + "createEmprise() : " + endC + "no_data_value : " +
              str(no_data_value))
        print(cyan + "createEmprise() : " + endC + "size_erode : " +
              str(size_erode))
        print(cyan + "createEmprise() : " + endC + "path_time_log : " +
              str(path_time_log))
        print(cyan + "createEmprise() : " + endC + "separ_name : " +
              str(separ_name))
        print(cyan + "createEmprise() : " + endC + "pos_date : " +
              str(pos_date))
        print(cyan + "createEmprise() : " + endC + "nb_char_date : " +
              str(nb_char_date))
        print(cyan + "createEmprise() : " + endC + "separ_date : " +
              str(separ_date))
        print(cyan + "createEmprise() : " + endC + "epsg : " + str(epsg))
        print(cyan + "createEmprise() : " + endC + "format_vector : " +
              str(format_vector))
        print(cyan + "createEmprise() : " + endC + "extension_raster : " +
              str(extension_raster) + endC)
        print(cyan + "createEmprise() : " + endC + "extension_vector : " +
              str(extension_vector) + endC)
        print(cyan + "createEmprise() : " + endC +
              "save_results_intermediate : " + str(save_results_intermediate))
        print(cyan + "createEmprise() : " + endC + "overwrite : " +
              str(overwrite))

    # Constantes
    EXT_LIST_HDF5 = ['h5', 'H5', 'he5', 'HE5', 'hdf5', 'HDF5']
    EXT_LIST = EXT_LIST_HDF5 + [
        'tif', 'TIF', 'tiff', 'TIFF', 'ecw', 'ECW', 'jp2', 'JP2', 'dim', 'DIM',
        'asc', 'ASC'
    ]
    SUFFIX_DETAILLEE = "_detail"
    SUFFIX_MASK_ZERO = "_mask_zeros"
    SUFFIX_TMP = "_tmp"

    CODAGE_8B = "uint8"
    ATTR_NAME_ID = "Id"
    ATTR_NAME_NOMIMAGE = "NomImage"
    ATTR_NAME_DATEACQUI = "DateAcqui"
    ATTR_NAME_HEUREACQUI = "HeureAcqui"
    ATTR_NAME_REFDOSSIER = "RefDossier"

    # Variables
    points_list = []
    name_image_list = []
    name_rep_list = []
    ref_dossier_list = []
    date_list = []
    heure_list = []
    optimize_emprise_nodata_shape_list = []
    polygons_attr_coord_dico = {}
    pos_date = pos_date - 1

    repertory_output = os.path.dirname(output_file)
    file_name = os.path.splitext(os.path.basename(output_file))[0]
    extension = os.path.splitext(output_file)[1]
    file_vector_detail = repertory_output + os.sep + file_name + SUFFIX_DETAILLEE + extension

    # Si un fichier de sortie avec le même nom existe déjà, et si l'option écrasement est à false, alors on ne le recalcule pas
    check = os.path.isfile(output_file)
    if check and not overwrite:
        print(
            bold + yellow + "createEmprise() : " + endC +
            "Le fichier vecteur d'emprise %s existe déjà : pas d'actualisation"
            % (output_file) + endC)
    # Sinon, ou si l'option écrasement est activée, alors on le calcule
    else:
        if check:
            try:  # Suppression de l'éventuel fichier existant
                removeVectorFile(output_file)
                removeVectorFile(file_vector_detail)
            except Exception:
                pass  # Si le fichier ne peut pas être supprimé, on suppose qu'il n'existe pas et on passe à la suite

        # Récuperer tous les sous répertoires
        sub_rep_list = getSubRepRecursifList(input_dir)
        sub_rep_list.append(input_dir)

        # Parcours de chaque dossier image du dossier en entrée
        for repertory in sub_rep_list:
            if os.path.isdir(repertory):

                if debug >= 2:
                    print(cyan + "createEmprises() : " + endC + bold + green +
                          "Traitement de : " + endC + repertory)

                # Récupération des images du dossier en entrée
                imagettes_jp2_tif_ecw_list = []
                imagettes_list = os.listdir(repertory)

                for elt1 in imagettes_list:
                    path_image = repertory + os.sep + elt1
                    if (os.path.isfile(path_image)) and (len(
                            elt1.rsplit('.', 1)) == 2) and (elt1.rsplit(
                                '.', 1)[1] in EXT_LIST):
                        if elt1.rsplit('.', 1)[1] in EXT_LIST_HDF5:
                            elt1_new = os.path.splitext(
                                elt1)[0] + extension_raster
                            path_image_new = repertory + os.sep + elt1_new
                            h5ToGtiff(path_image, path_image_new)
                            imagettes_jp2_tif_ecw_list.append(elt1_new)
                        else:
                            imagettes_jp2_tif_ecw_list.append(elt1)

                # Pour le cas ou le repertoire contient des fichiers images
                if imagettes_jp2_tif_ecw_list:

                    # Cas ou chaque emprise d'image est un polygone
                    if is_not_assembled or is_optimize_emprise or is_optimize_emprise_nodata:

                        for imagette in imagettes_jp2_tif_ecw_list:
                            # Récupération des emprises de l'image
                            path_image = repertory + os.sep + imagette
                            path_info_acquisition = repertory
                            xmin, xmax, ymin, ymax = getEmpriseImage(
                                path_image)
                            coord_list = [
                                xmin, ymax, xmax, ymax, xmax, ymin, xmin, ymin,
                                xmin, ymax
                            ]

                            # Saisie des données
                            points_list.append(coord_list)

                            # Récupération du nom de l'image pour la création des champs
                            input_image_name = os.path.splitext(
                                os.path.basename(path_image))[0]
                            name_image_list.append(input_image_name)

                            # Cas optimisation de l'emprise en enlevant les nodata
                            if is_optimize_emprise_nodata:

                                path_info_acquisition = path_image
                                optimize_emprise_nodata_shape = repertory_output + os.sep + input_image_name + extension_vector
                                optimize_emprise_tmp1_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    1) + extension_vector
                                optimize_emprise_tmp2_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    2) + extension_vector
                                optimize_emprise_tmp3_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    3) + extension_vector
                                optimize_emprise_tmp4_shape = repertory_output + os.sep + input_image_name + SUFFIX_TMP + str(
                                    4) + extension_vector
                                binary_mask_zeros_raster = repertory_output + os.sep + input_image_name + SUFFIX_MASK_ZERO + extension_raster
                                optimize_emprise_nodata_shape_list.append(
                                    optimize_emprise_nodata_shape)

                                # Création masque binaire pour séparer les no data des vraies valeurs
                                no_data_value_img = getNodataValueImage(
                                    path_image)
                                if no_data_value_img is None:
                                    no_data_value_img = no_data_value
                                createBinaryMaskMultiBand(
                                    path_image, binary_mask_zeros_raster,
                                    no_data_value_img, CODAGE_8B)

                                # Vectorisation du masque binaire true data/false data -> polygone avec uniquement les vraies valeurs
                                if os.path.exists(
                                        optimize_emprise_nodata_shape):
                                    removeVectorFile(
                                        optimize_emprise_nodata_shape)

                                polygonizeRaster(binary_mask_zeros_raster,
                                                 optimize_emprise_tmp1_shape,
                                                 input_image_name,
                                                 ATTR_NAME_ID, format_vector)

                                # Nettoyage des polygones parasites pour ne garder que le polygone principal si l'option "all" n'est pas demandée
                                if not is_all_polygons_used:
                                    geometry_list = getGeomPolygons(
                                        optimize_emprise_tmp1_shape, None,
                                        None, format_vector)
                                    geometry_orded_dico = {}
                                    geometry_orded_list = []
                                    for geometry in geometry_list:
                                        area = geometry.GetArea()
                                        geometry_orded_dico[area] = geometry
                                        geometry_orded_list.append(area)
                                    geometry_orded_list.sort()
                                    if len(geometry_orded_list) > 0:
                                        max_area = geometry_orded_list[
                                            len(geometry_orded_list) - 1]
                                        geom_max = geometry_orded_dico[
                                            max_area]
                                        attribute_dico = {
                                            ATTR_NAME_ID: ogr.OFTInteger
                                        }
                                        polygons_attr_geom_dico = {}
                                        polygons_attr_geom_dico[str(1)] = [
                                            geom_max, {
                                                ATTR_NAME_ID: str(1)
                                            }
                                        ]
                                        createPolygonsFromGeometryList(
                                            attribute_dico,
                                            polygons_attr_geom_dico,
                                            optimize_emprise_tmp2_shape, epsg,
                                            format_vector)
                                    else:
                                        print(
                                            cyan + "createEmprise() : " +
                                            bold + yellow +
                                            " Attention!!! Fichier non traite (ne contient pas de polygone): "
                                            + optimize_emprise_tmp1_shape +
                                            endC)
                                        optimize_emprise_tmp2_shape = optimize_emprise_tmp1_shape
                                else:
                                    optimize_emprise_tmp2_shape = optimize_emprise_tmp1_shape

                                # Nettoyage des polygones : simplification et suppression des trous
                                cleanRingVector(optimize_emprise_tmp2_shape,
                                                optimize_emprise_tmp3_shape,
                                                format_vector)
                                simplifyVector(optimize_emprise_tmp3_shape,
                                               optimize_emprise_tmp4_shape, 2,
                                               format_vector)
                                if size_erode != 0.0:
                                    bufferVector(
                                        optimize_emprise_tmp4_shape,
                                        optimize_emprise_nodata_shape,
                                        size_erode * -1, "", 1.0, 10,
                                        format_vector)
                                else:
                                    copyVectorFile(
                                        optimize_emprise_tmp4_shape,
                                        optimize_emprise_nodata_shape,
                                        format_vector)

                                # Nettoyage des fichiers intermédiaires
                                if not save_results_intermediate:
                                    removeFile(binary_mask_zeros_raster)
                                    removeVectorFile(
                                        optimize_emprise_tmp1_shape)
                                    removeVectorFile(
                                        optimize_emprise_tmp2_shape)
                                    removeVectorFile(
                                        optimize_emprise_tmp3_shape)
                                    removeVectorFile(
                                        optimize_emprise_tmp4_shape)

                            # Récupération de la date et de l'heure d'acquisition
                            # Gestion de l'emprise optimisée nodata : on utilise le nom de l'image pour la date d'acquisition, sinon c'est le nom du répertoire
                            getDataToFiels(
                                path_info_acquisition, is_not_date,
                                is_optimize_emprise
                                or is_optimize_emprise_nodata, separ_name,
                                pos_date, nb_char_date, separ_date,
                                points_list, ref_dossier_list, name_rep_list,
                                date_list, heure_list)

                    # Cas où l'on prend l'emprise globale des images : un seul polygone correspondant à l'emprise globale
                    else:

                        # Récupération des emprises des images du dossier
                        liste_x_l = []
                        liste_y_b = []
                        liste_x_r = []
                        liste_y_t = []

                        for imagette in imagettes_jp2_tif_ecw_list:
                            path_image = repertory + os.sep + imagette
                            xmin, xmax, ymin, ymax = getEmpriseImage(
                                path_image)

                            liste_x_l.append(xmin)
                            liste_x_r.append(xmax)
                            liste_y_b.append(ymin)
                            liste_y_t.append(ymax)

                        # Récupération des min et max de la liste des imagettes
                        # Coin haut gauche
                        xmin_l_t = str(min(liste_x_l))

                        # Coin bas gauche
                        ymin_l_b = str(min(liste_y_b))
                        xmin_l_b = xmin_l_t

                        # Coin bas droite
                        xmax_r_b = str(max(liste_x_r))

                        # Coin haut droite
                        ymax_r_t = str(max(liste_y_t))
                        xmax_r_t = xmax_r_b
                        ymax_r_b = ymin_l_b
                        ymin_l_t = ymax_r_t

                        coord_list = [
                            xmin_l_t, ymin_l_t, xmin_l_b, ymin_l_b, xmax_r_b,
                            ymax_r_b, xmax_r_t, ymax_r_t, xmin_l_t, ymin_l_t
                        ]
                        points_list.append(coord_list)

                        # Récupération du nom du répertoire pour création des champs
                        getDataToFiels(repertory, is_not_date,
                                       is_optimize_emprise, separ_name,
                                       pos_date, nb_char_date, separ_date,
                                       points_list, ref_dossier_list,
                                       name_rep_list, date_list, heure_list)

        #  Préparation des attribute_dico et polygons_attr_coord_dico
        if is_not_assembled:
            attribute_dico = {
                ATTR_NAME_ID: ogr.OFTInteger,
                ATTR_NAME_NOMIMAGE: ogr.OFTString,
                ATTR_NAME_DATEACQUI: ogr.OFTDate,
                ATTR_NAME_HEUREACQUI: ogr.OFTString
            }

            for i in range(len(points_list)):
                polygons_attr_coord_dico[str(i)] = [
                    points_list[i], {
                        ATTR_NAME_ID: i + 1,
                        ATTR_NAME_NOMIMAGE: name_image_list[i],
                        ATTR_NAME_DATEACQUI: date_list[i],
                        ATTR_NAME_HEUREACQUI: heure_list[i]
                    }
                ]

        else:
            attribute_dico = {
                ATTR_NAME_NOMIMAGE: ogr.OFTString,
                ATTR_NAME_REFDOSSIER: ogr.OFTString,
                ATTR_NAME_DATEACQUI: ogr.OFTDate,
                ATTR_NAME_HEUREACQUI: ogr.OFTString
            }

            for i in range(len(points_list)):
                polygons_attr_coord_dico[str(i)] = [
                    points_list[i], {
                        ATTR_NAME_NOMIMAGE: name_rep_list[i],
                        ATTR_NAME_REFDOSSIER: ref_dossier_list[i],
                        ATTR_NAME_DATEACQUI: date_list[i],
                        ATTR_NAME_HEUREACQUI: heure_list[i]
                    }
                ]

        # Cas optimisation de l'emprise en enlevant les nodata
        if is_optimize_emprise_nodata:

            if is_not_assembled:
                file_vector = output_file
            else:
                file_vector = file_vector_detail

            # Fusion des polygones d'emprises images optimisées sans nodata
            polygons_attr_geom_dico = {}
            i = 0
            for shape_file in optimize_emprise_nodata_shape_list:
                geom_list = getGeomPolygons(shape_file, ATTR_NAME_ID, 1,
                                            format_vector)
                if not is_all_polygons_used:
                    if geom_list is not None and len(geom_list) > 0:
                        geom = geom_list[0]
                        polygons_attr_geom_dico[str(i)] = [
                            geom, polygons_attr_coord_dico[str(i)][1]
                        ]
                else:
                    j = 1
                    for geom in geom_list:
                        polygons_attr_geom_dico[str(i + 1000000 * j)] = [
                            geom, polygons_attr_coord_dico[str(i)][1]
                        ]
                        j += 1
                i += 1

            createPolygonsFromGeometryList(attribute_dico,
                                           polygons_attr_geom_dico,
                                           file_vector, epsg, format_vector)

            # Suppression des fichiers intermediaires
            if not save_results_intermediate:
                for vector_to_del in optimize_emprise_nodata_shape_list:
                    removeVectorFile(vector_to_del)

        else:
            # Utilisation de createPolygonsFromCoordList()
            if is_optimize_emprise:
                file_vector = file_vector_detail
            else:
                file_vector = output_file

            # Creation des polygones a partir de la liste des coordonnées des emprises
            createPolygonsFromCoordList(attribute_dico,
                                        polygons_attr_coord_dico, file_vector,
                                        epsg, format_vector)

        # Cas fusion des polygones pour avoir une emprise constituée d'un seul polygone
        if not is_not_assembled:
            if is_optimize_emprise or is_optimize_emprise_nodata or is_all_polygons_used:
                column_name = ""
                if is_all_polygons_used:
                    column_name = ATTR_NAME_DATEACQUI
                elif is_optimize_emprise or is_optimize_emprise_nodata:
                    column_name = ATTR_NAME_NOMIMAGE

                # Fusion des polygones
                if is_all_polygons_used and is_not_date:
                    fusionNeighbourPolygonsBySameValue(file_vector,
                                                       output_file,
                                                       column_name,
                                                       format_vector)
                    #dissolveVector(file_vector, output_file, column_name, format_vector)
                else:
                    if not geometries2multigeometries(file_vector, output_file,
                                                      column_name,
                                                      format_vector):
                        copyVectorFile(file_vector, output_file, format_vector)

                # Remove intermediate files
                if not save_results_intermediate:
                    removeVectorFile(file_vector_detail)

    return


def createMacroSamples(image_input, vector_to_cut_input, vector_sample_output,
                       raster_sample_output, bd_vector_input_list, bd_buff_list,
                       sql_expression_list, path_time_log, macro_sample_name="",
                       simplify_vector_param=10.0, format_vector='ESRI Shapefile',
                       extension_vector=".shp", save_results_intermediate=False,
                       overwrite=True):

    # Update the log
    starting_event = "createMacroSamples() : create macro samples starting : "
    timeLine(path_time_log, starting_event)

    if debug >= 3:
        print(bold + green + "createMacroSamples() : Variables in the function" + endC)
        print(cyan + "createMacroSamples() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_to_cut_input : " + str(vector_to_cut_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_sample_output : " + str(vector_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "raster_sample_output : " + str(raster_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_vector_input_list : " + str(bd_vector_input_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_buff_list : " + str(bd_buff_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "sql_expression_list : " + str(sql_expression_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMacroSamples() : " + endC + "macro_sample_name : " + str(macro_sample_name) + endC)
        print(cyan + "createMacroSamples() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC)
        print(cyan + "createMacroSamples() : " + endC + "format_vector : " + str(format_vector))
        print(cyan + "createMacroSamples() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createMacroSamples() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMacroSamples() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    FOLDER_MASK_TEMP = "Mask_"
    FOLDER_CUTTING_TEMP = "Cut_"
    FOLDER_FILTERING_TEMP = "Filter_"
    FOLDER_BUFF_TEMP = "Buff_"

    SUFFIX_MASK_CRUDE = "_crude"
    SUFFIX_MASK = "_mask"
    SUFFIX_VECTOR_CUT = "_cut"
    SUFFIX_VECTOR_FILTER = "_filt"
    SUFFIX_VECTOR_BUFF = "_buff"

    CODAGE = "uint8"

    # STEP 1: CLEAN UP EXISTING DATA

    print(cyan + "createMacroSamples() : " + bold + green + "Cleaning up the workspace..." + endC)

    # Name of the working directory
    repertory_macrosamples_output = os.path.dirname(vector_sample_output)

    # Check whether the sample vector already exists and whether it must be overwritten
    check = os.path.isfile(vector_sample_output) or os.path.isfile(raster_sample_output)

    if check and not overwrite:  # The sample files already exist and overwrite is not enabled
        print(bold + yellow + "File sample : " + vector_sample_output + " already exists and will not be created again." + endC)
    else:
        if check:
            try:
                removeVectorFile(vector_sample_output)
                removeFile(raster_sample_output)
            except Exception:
                pass  # if the file does not exist, it cannot be removed: this step is skipped

        # Definition of the temporary directories
        repertory_mask_temp = repertory_macrosamples_output + os.sep + FOLDER_MASK_TEMP + macro_sample_name
        repertory_samples_cutting_temp = repertory_macrosamples_output + os.sep + FOLDER_CUTTING_TEMP + macro_sample_name
        repertory_samples_filtering_temp = repertory_macrosamples_output + os.sep + FOLDER_FILTERING_TEMP + macro_sample_name
        repertory_samples_buff_temp = repertory_macrosamples_output + os.sep + FOLDER_BUFF_TEMP + macro_sample_name

        if debug >= 4:
            print(cyan + "createMacroSamples() : " + endC + "Creating directory: " + str(repertory_mask_temp))
            print(cyan + "createMacroSamples() : " + endC + "Creating directory: " + str(repertory_samples_cutting_temp))
            print(cyan + "createMacroSamples() : " + endC + "Creating directory: " + str(repertory_samples_filtering_temp))
            print(cyan + "createMacroSamples() : " + endC + "Creating directory: " + str(repertory_samples_buff_temp))

        # Create the temporary directories that do not yet exist
        if not os.path.isdir(repertory_macrosamples_output):
            os.makedirs(repertory_macrosamples_output)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_filtering_temp):
            os.makedirs(repertory_samples_filtering_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Clean the temporary directories that are not empty
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_filtering_temp)
        cleanTempData(repertory_samples_buff_temp)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du nettoyage" + endC)

        # STEP 2: CUT THE VECTORS

        print(cyan + "createMacroSamples() : " + bold + green + "Cutting the samples..." + endC)

        if vector_to_cut_input is None:
            # 2.1: Create the mask delimiting the zone extent from the image
            image_name = os.path.splitext(os.path.basename(image_input))[0]
            vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
            createVectorMask(image_input, vector_mask)

            # 2.2: Simplify the mask
            vector_simple_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
            simplifyVector(vector_mask, vector_simple_mask, simplify_vector_param, format_vector)
        else:
            vector_simple_mask = vector_to_cut_input

        # 2.3: Cut the exogenous DB vectors with the mask
        vectors_cut_list = []
        for vector_input in bd_vector_input_list:
            vector_name = os.path.splitext(os.path.basename(vector_input))[0]
            vector_cut = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension_vector
            vectors_cut_list.append(vector_cut)
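        # Cut all the DB vectors at once against the simplified mask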
        cutoutVectors(vector_simple_mask, bd_vector_input_list, vectors_cut_list, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du decoupage" + endC)

        # STEP 3: FILTER THE VECTORS

        print(cyan + "createMacroSamples() : " + bold + green + "Filtering the samples..." + endC)

        vectors_filtered_list = []
        if sql_expression_list:
            for idx_vector in range(len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                vector_cut = vectors_cut_list[idx_vector]
                if idx_vector < len(sql_expression_list):
                    sql_expression = sql_expression_list[idx_vector]
                else:
                    sql_expression = ""
                vector_filtered = repertory_samples_filtering_temp + os.sep + vector_name + SUFFIX_VECTOR_FILTER + extension_vector
                vectors_filtered_list.append(vector_filtered)

                # Filtering via ogr2ogr
                if sql_expression != "":
                    names_attribut_list = getAttributeNameList(vector_cut, format_vector)
                    column = "'" + ", ".join(names_attribut_list) + "'"
                    ret = filterSelectDataVector(vector_cut, vector_filtered, column, sql_expression, format_vector=format_vector)
                    if not ret:
                        print(cyan + "createMacroSamples() : " + bold + yellow + "Warning: problem while filtering the DB vectors, the SQL expression %s is incorrect" % (sql_expression) + endC)
                        copyVectorFile(vector_cut, vector_filtered, format_vector)
                else:
                    print(cyan + "createMacroSamples() : " + bold + yellow + "No filtering on the file named: " + endC + vector_filtered)
                    copyVectorFile(vector_cut, vector_filtered, format_vector)

        else:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No filtering requested" + endC)
            for idx_vector in range(len(bd_vector_input_list)):
                vector_cut = vectors_cut_list[idx_vector]
                vectors_filtered_list.append(vector_cut)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du filtrage" + endC)

        # STEP 4: BUFFER THE VECTORS

        print(cyan + "createMacroSamples() : " + bold + green + "Applying the buffers..." + endC)

        vectors_buffered_list = []
        if bd_buff_list:
            # Loop over the input vectors
            for idx_vector in range(len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                buff = bd_buff_list[idx_vector]
                vector_filtered = vectors_filtered_list[idx_vector]
                vector_buffered = repertory_samples_buff_temp + os.sep + vector_name + SUFFIX_VECTOR_BUFF + extension_vector

                if buff != 0:
                    if os.path.isfile(vector_filtered):
                        if debug >= 3:
                            print(cyan + "createMacroSamples() : " + endC + "vector_filtered : " + str(vector_filtered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "vector_buffered : " + str(vector_buffered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "buff : " + str(buff) + endC)
                        bufferVector(vector_filtered, vector_buffered, buff, "", 1.0, 10, format_vector)
                    else:
                        print(cyan + "createMacroSamples() : " + bold + yellow + "No file named: " + endC + vector_filtered)

                else:
                    print(cyan + "createMacroSamples() : " + bold + yellow + "No buffer on the file named: " + endC + vector_filtered)
                    copyVectorFile(vector_filtered, vector_buffered, format_vector)

                vectors_buffered_list.append(vector_buffered)

        else:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No buffer requested" + endC)
            for idx_vector in range(len(bd_vector_input_list)):
                vector_filtered = vectors_filtered_list[idx_vector]
                vectors_buffered_list.append(vector_filtered)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin de la mise en place des tampons" + endC)

        # STEP 5: MERGE THE SHAPEFILES

        print(cyan + "createMacroSamples() : " + bold + green + "Merging by macroclass..." + endC)

        # If there is no shapefile to merge
        if not vectors_buffered_list:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No merge without data to merge" + endC)
        # If there is only one input shapefile
        elif len(vectors_buffered_list) == 1:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No merge for a single input file" + endC)
            copyVectorFile(vectors_buffered_list[0], vector_sample_output, format_vector)
        else:
            # Merge the shapefiles
            vectors_buffered_controled_list = []
            for vector_buffered in vectors_buffered_list:
                if os.path.isfile(vector_buffered) and (getGeometryType(vector_buffered, format_vector) in ('POLYGON', 'MULTIPOLYGON')) and (getNumberFeature(vector_buffered, format_vector) > 0):
                    vectors_buffered_controled_list.append(vector_buffered)
                else:
                    print(cyan + "createMacroSamples() : " + bold + red + "Warning: the buffered file is empty, it will not be merged: " + endC + vector_buffered, file=sys.stderr)

            fusionVectors(vectors_buffered_controled_list, vector_sample_output, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin de la fusion" + endC)

    # STEP 6: CREATE THE RESULT RASTER FILE IF REQUESTED

    # Create a binary mask
    if raster_sample_output != "" and image_input != "":
        repertory_output = os.path.dirname(raster_sample_output)
        if not os.path.isdir(repertory_output):
            os.makedirs(repertory_output)
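        # Burn the merged sample vector into a binary raster (value 1), using image_input as the reference grid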
        rasterizeBinaryVector(vector_sample_output, image_input, raster_sample_output, 1, CODAGE)

    # STEP 7: REMOVE UNNECESSARY INTERMEDIATE FILES

    # Remove intermediate data (only when this run actually produced it, i.e. the processing branch above was taken)
    if not save_results_intermediate and not (check and not overwrite):

        # Remove the cutting mask if it was created here
        if vector_simple_mask != vector_to_cut_input:
            if os.path.isfile(vector_simple_mask) :
                removeVectorFile(vector_simple_mask)

        # Remove the temporary directories
        deleteDir(repertory_mask_temp)
        deleteDir(repertory_samples_cutting_temp)
        deleteDir(repertory_samples_filtering_temp)
        deleteDir(repertory_samples_buff_temp)

    # Update the log
    ending_event = "createMacroSamples() : create macro samples ending : "
    timeLine(path_time_log, ending_event)

    return
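
# Minimal usage sketch for createMacroSamples(); the paths, buffer value and SQL
# filter below are hypothetical, for illustration only:
#
#     createMacroSamples(image_input="/tmp/ortho.tif",
#                        vector_to_cut_input=None,
#                        vector_sample_output="/tmp/samples/vegetation_samples.shp",
#                        raster_sample_output="/tmp/samples/vegetation_samples.tif",
#                        bd_vector_input_list=["/tmp/bd/vegetation.shp"],
#                        bd_buff_list=[5.0],
#                        sql_expression_list=["NATURE = 'Foret'"],
#                        path_time_log="/tmp/time.log",
#                        macro_sample_name="vegetation")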