def segmentImage(image_input, segmented_vector_output, segmentation_mode, sms_parametres_struct, srm_parametres_struct, path_time_log, ram_otb=0, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "segmentImage() : Segment image starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : SEGMENTATION" + endC)
    print(endC)

    CODAGE = "uint16"

    if debug >= 2:
        print(bold + green + "segmentImage() : Variables in the function" + endC)
        print(cyan + "segmentImage() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "segmentImage() : " + endC + "segmented_vector_output : " + str(segmented_vector_output) + endC)
        print(cyan + "segmentImage() : " + endC + "segmentation_mode : " + str(segmentation_mode) + endC)
        print(cyan + "segmentImage() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "segmentImage() : " + endC + "ram_otb : " + str(ram_otb) + endC)
        print(cyan + "segmentImage() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "segmentImage() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "segmentImage() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "segmentImage() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Check whether a segmented image already exists
    check = os.path.isfile(segmented_vector_output)

    # If it exists and overwrite is disabled, skip to the next step
    if check and not overwrite:
        print(cyan + "segmentImage() : " + bold + green + "Image already segmented" + "." + endC)
    # Otherwise, or if overwrite is enabled, apply the segmentation
    else:
        # Try to delete the file
        try:
            removeVectorFile(segmented_vector_output, format_vector=format_vector)
        except Exception:
            # Ignore the exception raised when the file does not exist (and thus cannot be deleted)
            pass

        if debug >= 3:
            print(cyan + "segmentImage() : " + bold + green + "Applying segmentation", "...", '\n' + endC)

        # Segmentation:
        if segmentation_mode.lower() == "sms":
            # With otbcli_LargeScaleMeanShift
            command = "otbcli_LargeScaleMeanShift -in %s -spatialr %d -ranger %f -minsize %d -tilesizex %d -tilesizey %d -mode.vector.out %s" % (image_input, sms_parametres_struct.spatial_radius, sms_parametres_struct.range_radius, sms_parametres_struct.min_segement_size, sms_parametres_struct.tile_size, sms_parametres_struct.tile_size, segmented_vector_output)

            if ram_otb > 0:
                command += " -ram %d" % (ram_otb)

            if debug >= 2:
                print(cyan + "segmentImage() : " + bold + green + "Start of image segmentation" + endC)
                print(command)

            exitCode = os.system(command)
            if exitCode != 0:
                print(command)
                raise NameError(cyan + "segmentImage() : " + bold + red + "An error occurred during otbcli_LargeScaleMeanShift command. See error message above.")
            print('\n' + cyan + "segmentImage() : " + bold + green + "Segmentation applied!" + endC)

        elif segmentation_mode.lower() == "srm":
            # With otbcli_GenericRegionMerging
            repertory_output = os.path.dirname(segmented_vector_output)
            layer_name = os.path.splitext(os.path.basename(segmented_vector_output))[0]
            segmented_raster_tmp = repertory_output + os.sep + os.path.splitext(os.path.basename(segmented_vector_output))[0] + os.path.splitext(os.path.basename(image_input))[1]

            command = "otbcli_GenericRegionMerging -in %s -criterion %s -threshold %f -niter %d -speed %d -cw %f -sw %f -out %s %s" % (image_input, srm_parametres_struct.homogeneity_criterion, srm_parametres_struct.threshol_criterion, srm_parametres_struct.number_iteration, srm_parametres_struct.segmentation_speed, srm_parametres_struct.weight_spectral_homogeneity, srm_parametres_struct.weight_spatial_homogeneity, segmented_raster_tmp, CODAGE)

            """
            if ram_otb > 0:
                command += " -ram %d" % (ram_otb)
            """

            if debug >= 2:
                print(cyan + "segmentImage() : " + bold + green + "Start of image segmentation" + endC)
                print(command)

            exitCode = os.system(command)
            if exitCode != 0:
                print(command)
                raise NameError(cyan + "segmentImage() : " + bold + red + "An error occurred during otbcli_GenericRegionMerging command. See error message above.")
            print('\n' + cyan + "segmentImage() : " + bold + green + "Segmentation applied!" + endC)

            # Vectorize the segmentation result with GDAL
            command = "gdal_polygonize.py %s -f \"%s\" %s %s ID" % (segmented_raster_tmp, format_vector, segmented_vector_output, layer_name)

            if debug >= 2:
                print(command)

            exitCode = os.system(command)
            if exitCode != 0:
                print(command)
                raise NameError(cyan + "segmentImage() : " + bold + red + "An error occurred during gdal_polygonize command. See error message above.")
            print('\n' + cyan + "segmentImage() : " + bold + green + "Vectorization applied!" + endC)

            # Clean up intermediate data
            if not save_results_intermediate:
                # Delete the temporary segmentation raster
                if os.path.isfile(segmented_raster_tmp):
                    removeFile(segmented_raster_tmp)

    print(endC)
    print(bold + green + "## END : SEGMENTATION" + endC)
    print(endC)

    # Update the log
    ending_event = "segmentImage() : Segment image ending : "
    timeLine(path_time_log, ending_event)

    return
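# A minimal, hypothetical usage sketch for segmentImage(), not part of the
# chain's API. The parameter structs are assumptions: any object exposing the
# attributes read above (spatial_radius, range_radius, min_segement_size,
# tile_size for SMS; homogeneity_criterion, threshol_criterion,
# number_iteration, segmentation_speed, weight_spectral_homogeneity,
# weight_spatial_homogeneity for SRM) works, e.g. a types.SimpleNamespace.
# All paths and values are placeholders.
def _example_segmentImage():
    from types import SimpleNamespace
    # Mean-shift parameters (hypothetical values)
    sms_params = SimpleNamespace(spatial_radius=5, range_radius=15.0,
                                 min_segement_size=50, tile_size=500)
    # Region-merging parameters (hypothetical values)
    srm_params = SimpleNamespace(homogeneity_criterion="bs", threshol_criterion=60.0,
                                 number_iteration=0, segmentation_speed=0,
                                 weight_spectral_homogeneity=0.7,
                                 weight_spatial_homogeneity=0.3)
    # "sms" mode writes the segment polygons directly; "srm" mode produces a
    # temporary raster that is then vectorized with gdal_polygonize.py.
    segmentImage("input_image.tif", "segments.shp", "sms",
                 sms_params, srm_params, "time_log.txt", ram_otb=4096)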
def comparareClassificationToReferenceGrid(image_input, vector_cut_input, vector_sample_input, vector_grid_input, vector_grid_output, size_grid, field_value_verif, no_data_value, path_time_log, epsg=2154, format_raster='GTiff', format_vector="ESRI Shapefile", extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "comparareClassificationToReferenceGrid() : starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green + "comparareClassificationToReferenceGrid() : Variables in the function" + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "vector_sample_input : " + str(vector_sample_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "vector_grid_input : " + str(vector_grid_input) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "vector_grid_output : " + str(vector_grid_output) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "size_grid : " + str(size_grid) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "field_value_verif : " + str(field_value_verif) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "comparareClassificationToReferenceGrid() : " + endC + "overwrite : " + str(overwrite) + endC)

    # STEP 0 : PREPARE THE INTERMEDIATE FILES
    CODAGE = "uint16"

    SUFFIX_STUDY = '_study'
    SUFFIX_TEMP = '_temp'
    SUFFIX_FUSION = '_other_fusion'

    NONE_VALUE_QUANTITY = -1.0
    FIELD_VALUE_OTHER = 65535

    FIELD_NAME_ID = "id"
    FIELD_NAME_RATE_BUILD = "rate_build"
    FIELD_NAME_RATE_OTHER = "rate_other"
    FIELD_NAME_SREF_BUILD = "sref_build"
    FIELD_NAME_SCLA_BUILD = "scla_build"
    FIELD_NAME_SREF_OTHER = "sref_other"
    FIELD_NAME_SCLA_OTHER = "scla_other"
    FIELD_NAME_KAPPA = "kappa"
    FIELD_NAME_ACCURACY = "accuracy"

    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

    repertory_output = os.path.dirname(vector_grid_output)
    base_name = os.path.splitext(os.path.basename(vector_grid_output))[0]

    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    vector_grid_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    image_raster_other_fusion = repertory_output + os.sep + base_name + SUFFIX_FUSION + extension_raster

    # STEP 0 : CHECKS
    # Check the nomenclature value to verify
    if field_value_verif >= FIELD_VALUE_OTHER:
        print(cyan + "comparareClassificationToReferenceGrid() : " + bold + red + "Warning: the nomenclature value to verify : " + str(field_value_verif) + " must be lower than the arbitrary fusion value for the other classes : " + str(FIELD_VALUE_OTHER) + endC, file=sys.stderr)
        sys.exit(1)  # exit with an error code

    # STEP 1 : DEFINE A STUDY AREA SHAPEFILE
    if (vector_cut_input is not None) and (vector_cut_input != "") and (os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input
    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # STEP 2 : UNIFY THE "OTHER" AREA
    # Reallocate the classification values other than the building class
    change_reaff_value_list = []
    reaff_value_list = identifyPixelValues(image_input)
    if field_value_verif in reaff_value_list:
        reaff_value_list.remove(field_value_verif)
    if no_data_value in reaff_value_list:
        reaff_value_list.remove(no_data_value)
    for elem in reaff_value_list:
        change_reaff_value_list.append(FIELD_VALUE_OTHER)
    reallocateClassRaster(image_input, image_raster_other_fusion, reaff_value_list, change_reaff_value_list)

    # STEP 3 : CREATE THE GRID OVER THE STUDY AREA
    # Define the attributes of the file
    attribute_dico = {FIELD_NAME_ID: ogr.OFTInteger, FIELD_NAME_RATE_BUILD: ogr.OFTReal, FIELD_NAME_RATE_OTHER: ogr.OFTReal, FIELD_NAME_SREF_BUILD: ogr.OFTReal, FIELD_NAME_SCLA_BUILD: ogr.OFTReal, FIELD_NAME_SREF_OTHER: ogr.OFTReal, FIELD_NAME_SCLA_OTHER: ogr.OFTReal, FIELD_NAME_KAPPA: ogr.OFTReal, FIELD_NAME_ACCURACY: ogr.OFTReal}
    nb_polygon = 0

    if (vector_grid_input is not None) and (vector_grid_input != "") and (os.path.isfile(vector_grid_input)):
        # Use the input grid file
        # Copy the input grid file to the output grid file
        copyVectorFile(vector_grid_input, vector_grid_output)

        # Add the fields to the output grid file
        for field_name in attribute_dico:
            addNewFieldVector(vector_grid_output, field_name, attribute_dico[field_name], None, None, None, format_vector)

        # Set the "id" field identifying each square of the grid
        nb_polygon = updateIndexVector(vector_grid_output, FIELD_NAME_ID, format_vector)
    else:
        # If no grid file exists, create one using the size_grid value
        # Create the grid file
        nb_polygon = createGridVector(vector_study, vector_grid_temp, size_grid, size_grid, attribute_dico, overwrite, epsg, format_vector)

        # Cut the grid with the study area shapefile
        cutVectorAll(vector_study, vector_grid_temp, vector_grid_output, format_vector)

    # STEP 4 : COMPUTE THE QUALITY INDICATOR FOR EACH SQUARE OF THE GRID
    if debug >= 2:
        print(bold + "nb_polygon = " + endC + str(nb_polygon) + "\n")

    # For each existing polygon
    sum_rate_quantity_build = 0
    nb_rate_sum = 0
    size_area_pixel = abs(pixel_size_x * pixel_size_y)

    for id_polygon in range(nb_polygon):
        geom_list = getGeomPolygons(vector_grid_output, FIELD_NAME_ID, id_polygon, format_vector)
        if geom_list is not None and geom_list != []:  # and (id_polygon == 24 or id_polygon == 30):
            if debug >= 1:
                print(cyan + "comparareClassificationToReferenceGrid() : " + bold + green + "Computing the matrix for polygon n°: " + str(id_polygon) + endC)

            geom = geom_list[0]
            class_ref_list, class_pro_list, rate_quantity_list, kappa, accuracy, matrix = computeQualityIndiceRateQuantity(image_raster_other_fusion, vector_sample_input, repertory_output, base_name + str(id_polygon), geom, size_grid, pixel_size_x, pixel_size_y, field_value_verif, FIELD_VALUE_OTHER, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, overwrite, save_results_intermediate)

            # If the quality indicator computations are OK
            if debug >= 2:
                print(matrix)
            if matrix is not None and matrix != [] and matrix[0] != []:

                # Get the building quantity and compute the reference surface and the classification surface (full square or not!)
                if len(class_ref_list) == 2 and len(class_pro_list) == 2:  # Case with both build and other pixels (in reference and in production)
                    rate_quantity_build = rate_quantity_list[0]
                    rate_quantity_other = rate_quantity_list[1]
                    size_area_ref_build = (matrix[0][0] + matrix[0][1]) * size_area_pixel
                    size_area_classif_build = (matrix[0][0] + matrix[1][0]) * size_area_pixel
                    size_area_ref_other = (matrix[1][0] + matrix[1][1]) * size_area_pixel
                    size_area_classif_other = (matrix[0][1] + matrix[1][1]) * size_area_pixel
                    sum_rate_quantity_build += rate_quantity_build
                    nb_rate_sum += 1

                else:  # Case with only build pixels OR only other pixels

                    if class_ref_list[0] == field_value_verif:  # Case with only build reference pixels
                        rate_quantity_build = rate_quantity_list[0]
                        rate_quantity_other = NONE_VALUE_QUANTITY
                        size_area_ref_other = 0

                        if len(class_pro_list) == 2:  # Case with both build and other production pixels
                            size_area_ref_build = (matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][0] * size_area_pixel
                            size_area_classif_other = matrix[0][1] * size_area_pixel
                        else:
                            size_area_ref_build = matrix[0][0] * size_area_pixel
                            if class_pro_list[0] == field_value_verif:  # Case with only build production pixels
                                size_area_classif_build = matrix[0][0] * size_area_pixel
                                size_area_classif_other = 0
                            else:  # Case with only other production pixels
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][0] * size_area_pixel

                    else:  # Case with only other reference pixels
                        rate_quantity_build = NONE_VALUE_QUANTITY
                        rate_quantity_other = rate_quantity_list[0]
                        size_area_ref_build = 0

                        if len(class_pro_list) == 2:  # Case with both build and other production pixels
                            size_area_ref_other = (matrix[0][0] + matrix[0][1]) * size_area_pixel
                            size_area_classif_build = matrix[0][0] * size_area_pixel
                            size_area_classif_other = matrix[0][1] * size_area_pixel
                        else:
                            size_area_ref_other = matrix[0][0] * size_area_pixel
                            if class_pro_list[0] == field_value_verif:  # Case with only build production pixels
                                size_area_classif_build = matrix[0][0] * size_area_pixel
                                size_area_classif_other = 0
                            else:  # Case with only other production pixels
                                size_area_classif_build = 0
                                size_area_classif_other = matrix[0][0] * size_area_pixel

                # Update the attributes of the grid square
                setAttributeValues(vector_grid_output, FIELD_NAME_ID, id_polygon, {FIELD_NAME_RATE_BUILD: rate_quantity_build, FIELD_NAME_RATE_OTHER: rate_quantity_other, FIELD_NAME_SREF_BUILD: size_area_ref_build, FIELD_NAME_SCLA_BUILD: size_area_classif_build, FIELD_NAME_SREF_OTHER: size_area_ref_other, FIELD_NAME_SCLA_OTHER: size_area_classif_other, FIELD_NAME_KAPPA: kappa, FIELD_NAME_ACCURACY: accuracy}, format_vector)

    # Compute the average
    if nb_rate_sum != 0:
        average_quantity_build = sum_rate_quantity_build / nb_rate_sum
    else:
        average_quantity_build = 0
    if debug >= 2:
        print(bold + "nb_polygon_used = " + endC + str(nb_rate_sum))
        print(bold + "average_quantity_build = " + endC + str(average_quantity_build) + "\n")

    # STEP 5 : DELETE USELESS INTERMEDIATE FILES
    # Delete intermediate data
    if not save_results_intermediate:
        if not cutting_action:
            if os.path.isfile(vector_study):
                removeVectorFile(vector_study)
        if os.path.isfile(image_raster_other_fusion):
            removeFile(image_raster_other_fusion)
        if os.path.isfile(vector_grid_temp):
            removeVectorFile(vector_grid_temp)

    print(endC)
    print(bold + green + "## END : COMPARE QUALITY FROM CLASSIF IMAGE BY GRID" + endC)
    print(endC)

    # Update the log
    ending_event = "comparareClassificationToReferenceGrid() : ending : "
    timeLine(path_time_log, ending_event)

    return average_quantity_build
def runTDCKmeans(input_images, output_dir, input_sea_points, input_cut_vector, no_data_value, path_time_log, nb_classes=5, epsg=2154, format_raster='GTiff', format_vector="ESRI Shapefile", extension_raster=".tif", extension_vector=".shp", save_results_intermediate=True, overwrite=True):

    # Update the log
    starting_event = "runTDCKmeans() : Select TDC kmeans starting : "
    timeLine(path_time_log, starting_event)

    # Constants
    ID = "id"
    REP_TEMP = "temp_TDCKmeans"
    CHANNEL_ORDER = ["Red", "Green", "Blue", "NIR"]

    # Variables
    repertory_temp = output_dir + os.sep + REP_TEMP

    # Clean the output directory
    if overwrite and os.path.exists(output_dir):
        shutil.rmtree(output_dir)

    # Create the output directory if it does not already exist
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Create the temporary output directory if it does not already exist
    if not os.path.exists(repertory_temp):
        os.makedirs(repertory_temp)

    # Check that the files exist
    if not os.path.exists(input_cut_vector):
        print(cyan + "runTDCKmeans() : " + bold + red + "The file %s does not exist" % (input_cut_vector) + endC, file=sys.stderr)
        sys.exit(1)

    # Display the parameters
    if debug >= 3:
        print(bold + green + "Variables in runTDCKmeans - General variables" + endC)
        print(cyan + "runTDCKmeans() : " + endC + "input_images : " + str(input_images) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "output_dir : " + str(output_dir) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "input_sea_points : " + str(input_sea_points) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "input_cut_vector : " + str(input_cut_vector) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "nb_classes : " + str(nb_classes) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "runTDCKmeans() : " + endC + "overwrite : " + str(overwrite) + endC)

    dico = ""
    for image in input_images:
        # Check that the files exist
        if not os.path.exists(image):
            print(cyan + "runTDCKmeans() : " + bold + red + "The file %s does not exist" % (image) + endC, file=sys.stderr)
            sys.exit(1)

        # Initialize the output files
        image_name = os.path.splitext(os.path.basename(image))[0]
        im_NDVI = repertory_temp + os.sep + "im_NDVI_" + image_name + extension_raster
        im_NDWI2 = repertory_temp + os.sep + "im_NDWI2_" + image_name + extension_raster
        im_BI = repertory_temp + os.sep + "im_BI_" + image_name + extension_raster
        im_concat = repertory_temp + os.sep + "im_concat_" + image_name + extension_raster
        im_kmeans = repertory_temp + os.sep + "im_kmeans_" + image_name + extension_raster
        im_kmeans_decoup = repertory_temp + os.sep + "im_kmeans_decoup_" + image_name + extension_raster
        im_kmeans_decoup_filter = repertory_temp + os.sep + "im_filter_" + image_name + extension_raster
        im_kmeans_vect_name = "im_kmeans_vect_" + image_name
        im_kmeans_vector = repertory_temp + os.sep + im_kmeans_vect_name + extension_vector  # Reuse repertory_temp (the original rebuilt the path with a differently cased literal "temp_TDCKMeans")

        # Create the index images
        createNDVI(image, im_NDVI, CHANNEL_ORDER)
        createNDWI2(image, im_NDWI2, CHANNEL_ORDER)
        createBI(image, im_BI, CHANNEL_ORDER)

        # Concatenate the bands of the raw, NDVI, NDWI2 and BI images
        command = "otbcli_ConcatenateImages -il %s %s %s %s -out %s" % (image, im_NDVI, im_NDWI2, im_BI, im_concat)
        if debug >= 3:
            print(command)
        exitCode = os.system(command)
        if exitCode != 0:
            raise NameError(cyan + "runTDCKmeans() : " + endC + bold + red + "An error occurred during otbcli_ConcatenateImages command. See error message above." + endC)
        else:
            print(cyan + "runTDCKmeans() : " + endC + bold + green + "Create binary file %s complete!" % (im_concat) + endC)

        # K-Means on the concatenated image
        if IS_VERSION_UPPER_OTB_7_0:
            classificationKmeans(im_concat, "", im_kmeans, nb_classes, 300, 1, no_data_value, format_raster)
            if debug >= 2:
                print(cyan + "runTDCKmeans() : " + endC + bold + green + "Create binary file %s complete!" % (im_kmeans) + endC)
        else:
            command = "otbcli_KMeansClassification -in %s -nc %s -nodatalabel %s -rand %s -out %s" % (im_concat, str(nb_classes), str(no_data_value), str(1), im_kmeans)
            if debug >= 3:
                print(command)
            exitCode = os.system(command)
            if exitCode != 0:
                raise NameError(cyan + "runTDCKmeans() : " + endC + bold + red + "An error occurred during otbcli_KMeansClassification command. See error message above." + endC)
            else:
                print(cyan + "runTDCKmeans() : " + endC + bold + green + "Create binary file %s complete!" % (im_kmeans) + endC)

        # Cut the K-Means raster
        cutImageByVector(input_cut_vector, im_kmeans, im_kmeans_decoup, None, None, no_data_value, epsg, format_raster, format_vector)

        # Clean up the K-Means raster
        command = "otbcli_ClassificationMapRegularization -io.in %s -io.out %s -ip.radius %s" % (im_kmeans_decoup, im_kmeans_decoup_filter, str(5))
        if debug >= 3:
            print(command)
        exitCode = os.system(command)
        if exitCode != 0:
            raise NameError(cyan + "runTDCKmeans() : " + endC + bold + red + "An error occurred during otbcli_ClassificationMapRegularization command. See error message above." + endC)
        else:
            if debug >= 2:
                print(cyan + "runTDCKmeans() : " + endC + bold + green + "Create binary file %s complete!" % (im_kmeans_decoup_filter) + endC)

        # Vectorize the cut image
        polygonizeRaster(im_kmeans_decoup_filter, im_kmeans_vector, im_kmeans_vect_name, ID, format_vector)

        # Build the dictionary string passed to polygonMerToTDC
        dico += image + ":" + im_kmeans_vector + " "

    # Call polygonMerToTDC to extract the TDC
    dico = dico[:-1]
    polygonMerToTDC(str(dico), output_dir, input_sea_points, False, 1, input_cut_vector, 1, -1, no_data_value, path_time_log, epsg, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)

    # Delete the temporary directory
    if not save_results_intermediate and os.path.exists(repertory_temp):
        shutil.rmtree(repertory_temp)

    # Update the log
    ending_event = "runTDCKmeans() : Select TDC kmeans ending : "
    timeLine(path_time_log, ending_event)

    return
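# A minimal, hypothetical usage sketch for runTDCKmeans(); the paths are
# placeholders. Each input image is assumed to carry the four bands listed in
# CHANNEL_ORDER (Red, Green, Blue, NIR) so the NDVI, NDWI2 and BI indices can
# be derived before the K-Means classification.
def _example_runTDCKmeans():
    runTDCKmeans(
        input_images=["scene_2019.tif", "scene_2020.tif"],
        output_dir="tdc_output",
        input_sea_points="sea_points.shp",   # sea-side seed points used by polygonMerToTDC
        input_cut_vector="coastal_strip.shp",
        no_data_value=0,
        path_time_log="time_log.txt",
        nb_classes=5)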
def classRasterSubSampling(satellite_image_input, classified_image_input, image_output, table_reallocation, sub_sampling_number, no_data_value, path_time_log, rand_otb=0, ram_otb=0, number_of_actives_pixels_threshold=8000, format_raster='GTiff', extension_raster=".tif", save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "classRasterSubSampling() : Micro class subsampling on classification image starting : "
    timeLine(path_time_log, starting_event)

    if debug >= 3:
        print(cyan + "classRasterSubSampling() : " + endC + "satellite_image_input : " + str(satellite_image_input) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "classified_image_input : " + str(classified_image_input) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "image_output : " + str(image_output) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "table_reallocation : " + str(table_reallocation) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "sub_sampling_number : " + str(sub_sampling_number) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "rand_otb : " + str(rand_otb) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "ram_otb : " + str(ram_otb) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "number_of_actives_pixels_threshold : " + str(number_of_actives_pixels_threshold) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "classRasterSubSampling() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    CODAGE = "uint16"
    CODAGE_8B = "uint8"

    TEMP = "TempSubSampling_"
    MASK_SUF = "_Mask"
    SUB_SAMPLE_SUF = "_SubSampled"
    CENTROID_SUF = "_Centroids"
    TEMP_OUT = "_temp_out"
    EXTENSION_TXT = ".txt"

    # Content of the new table
    text_new_table = ""

    # BUILD THE USEFUL PATH NAMES
    name = os.path.splitext(os.path.basename(image_output))[0]
    input_classified_image_path = os.path.dirname(classified_image_input)                          # e.g. D2_Par_Zone/Paysage_01/Corr_2/Resultats/Temp/
    temp_sub_sampling_path = input_classified_image_path + os.sep + TEMP + name + os.sep           # Directory holding this brick's temporary files. e.g. D2_Par_Zone/Paysage_01/Corr_2/Resultats/Temp/Temp_Sub_Sampling/
    input_classified_image_complete_name = os.path.basename(classified_image_input)                # e.g. Paysage_01_raw.tif
    input_classified_image_name = os.path.splitext(input_classified_image_complete_name)[0]        # e.g. Paysage_01_raw
    input_classified_image_extend = os.path.splitext(input_classified_image_complete_name)[1]      # e.g. .tif
    image_output_temp = os.path.splitext(image_output)[0] + TEMP_OUT + extension_raster            # e.g. D2_Par_Zone/Paysage_01/Corr_2/Resultats/Temp/Temp_Sub_Sampling/Paysage_01_raw_temp.tif

    # Create temp_sub_sampling_path if it does not exist
    if not os.path.isdir(os.path.dirname(temp_sub_sampling_path)):
        os.makedirs(os.path.dirname(temp_sub_sampling_path))

    print(cyan + "classRasterSubSampling() : " + bold + green + "START ...\n" + endC)

    # Read the proposal table file
    supp_class_list, reaff_class_list, macro_reaff_class_list, sub_sampling_class_list, sub_sampling_number_list = readReallocationTable(table_reallocation, sub_sampling_number)
    # Lib_text function
    info_table_list = readTextFileBySeparator(table_reallocation, "\n")

    # Find the list of micro classes contained in the input classification file
    class_values_list = identifyPixelValues(classified_image_input)

    # Remove from the table the lines corresponding to "-2" actions
    for ligne_table in info_table_list:
        if not "-2" in ligne_table[0]:
            text_new_table += str(ligne_table[0]) + "\n"

    if debug >= 3:
        print("supp_class_list : " + str(supp_class_list))
        print("reaff_class_list : " + str(reaff_class_list))
        print("macro_reaff_class_list : " + str(macro_reaff_class_list))
        print("sub_sampling_class_list : " + str(sub_sampling_class_list))
        print("sub_sampling_number_list : " + str(sub_sampling_number_list))

    # This brick only deals with the subsampling part

    # Handle the deletion case
    if len(supp_class_list) > 0:
        print(cyan + "classRasterSubSampling() : " + bold + yellow + "WARNING : classes are not deleted for a raster-format classification file." + '\n' + endC)

    # Handle the reallocation case
    if len(reaff_class_list) > 0:
        print(cyan + "classRasterSubSampling() : " + bold + yellow + "WARNING : the SpecificSubSampling brick does not handle reallocations. Once this brick is done, check the reallocation table and run the reallocation brick." + '\n' + endC)

    if len(sub_sampling_class_list) > 0:

        if debug >= 3:
            print(cyan + "classRasterSubSampling() : " + bold + green + "START OF SUBSAMPLING FOR CLASSES %s " % (sub_sampling_class_list) + endC)

        # Loop over the classes to subsample
        processing_pass_first = False
        for idx_class in range(len(sub_sampling_class_list)):

            # INITIALIZE THE PROCESSING OF THE CLASS
            class_to_sub_sample = sub_sampling_class_list[idx_class]  # Class to subsample. e.g. 21008

            if idx_class == 0 or not processing_pass_first:
                # Image to reclassify: classified_image_input on the first pass
                image_to_sub_sample = classified_image_input
            else:
                # Image to reclassify: afterwards, the output of the previous iteration
                image_to_sub_sample = image_output

            # Find the first available label for the class
            base_subclass_label = int(class_to_sub_sample / 100) * 100
            subclass_label = base_subclass_label
            for class_value in class_values_list:
                if (class_value > subclass_label) and (class_value < base_subclass_label + 100):
                    subclass_label = class_value
            subclass_label += 1

            # subclass_label = int(class_to_sub_sample/100)*100 + 20 + class_to_sub_sample%20 * 5
            # Starting label of the subclasses. Proposed formula: first 3 digits of class_to_sub_sample, then add 20 + 5 * (class_to_sub_sample modulo 20).
            # e.g. 21000 -> 21020, 21001 -> 21025, 21002 -> 21030, etc.
            # Assumes fewer than 20 micro classes, each subsampled into at most 5 parts. If that is not the case: ADAPT THIS.

            number_of_sub_samples = sub_sampling_number_list[idx_class]  # Number of subclasses requested for the subsampling of class_to_sub_sample. e.g. 4
            class_mask_raster = temp_sub_sampling_path + input_classified_image_name + "_" + str(class_to_sub_sample) + MASK_SUF + input_classified_image_extend              # e.g. .../Temp_Sub_Sampling/Paysage_01_raw_21008_Mask.tif
            class_subsampled_raster = temp_sub_sampling_path + input_classified_image_name + "_" + str(class_to_sub_sample) + SUB_SAMPLE_SUF + input_classified_image_extend  # e.g. .../Temp_Sub_Sampling/Paysage_01_raw_21008_SubSampled.tif
            centroid_file = temp_sub_sampling_path + input_classified_image_name + "_" + str(class_to_sub_sample) + CENTROID_SUF + EXTENSION_TXT                              # e.g. .../Temp_Sub_Sampling/Paysage_01_raw_21008_Centroids.txt

            if debug >= 5:
                print(cyan + "classRasterSubSampling() : " + endC + "class_to_sub_sample :", class_to_sub_sample)
                print(cyan + "classRasterSubSampling() : " + endC + "subclass_label :", subclass_label)
                print(cyan + "classRasterSubSampling() : " + endC + "number_of_sub_samples :", number_of_sub_samples)
                print(cyan + "classRasterSubSampling() : " + endC + "class_mask_raster :", class_mask_raster)
                print(cyan + "classRasterSubSampling() : " + endC + "class_subsampled_raster :", class_subsampled_raster)
                print(cyan + "classRasterSubSampling() : " + endC + "centroid_file :", centroid_file)

            if debug >= 3:
                print(cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s : SUBSAMPLING %s INTO %s CLASSES " % (idx_class + 1, len(sub_sampling_class_list), class_to_sub_sample, number_of_sub_samples) + endC)

            # STEP 1/5 : EXTRACT THE BINARY MASK OF THE PIXELS OF THE CLASS
            expression_masque = "\"im1b1 == %s? 1 : 0\"" % (class_to_sub_sample)
            command = "otbcli_BandMath -il %s -out %s %s -exp %s" % (classified_image_input, class_mask_raster, CODAGE_8B, expression_masque)

            if debug >= 2:
                print("\n" + cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 1/5 : Start of extracting the binary mask of class %s" % (idx_class + 1, len(sub_sampling_class_list), class_to_sub_sample) + endC)
                print(command)
            os.system(command)
            if debug >= 2:
                print(cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 1/5 : End of extracting the binary mask of class %s, available here : %s" % (idx_class + 1, len(sub_sampling_class_list), class_to_sub_sample, class_mask_raster) + endC)

            # TEST WHETHER THE KMEANS CAN BE RUN
            number_of_actives_pixels = countPixelsOfValue(class_mask_raster, 1)  # Count the number of pixels available for the kmeans

            if number_of_actives_pixels > (number_of_sub_samples * number_of_actives_pixels_threshold):  # Case where more pixels are available for the kmeans than the threshold

                # STEP 2/5 : UNSUPERVISED CLASSIFICATION OF THE PIXELS OF THE CLASS
                if debug >= 3:
                    print("\n" + cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 2/5 : There are enough pixels for the subsampling : %s for a minimum of %s required " % (idx_class + 1, len(sub_sampling_class_list), number_of_actives_pixels, int(number_of_sub_samples) * number_of_actives_pixels_threshold) + endC)
                if debug >= 2:
                    print("\n" + cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 2/5 : Start of the subsampling by unsupervised classification into %s classes " % (idx_class + 1, len(sub_sampling_class_list), number_of_sub_samples) + endC)

                # Call the kmeans
                input_mask_list = []
                input_mask_list.append(class_mask_raster)
                output_masked_image_list = []
                output_masked_image_list.append(class_subsampled_raster)
                output_centroids_files_list = []
                output_centroids_files_list.append(centroid_file)
                macroclass_sampling_list = []
                macroclass_sampling_list.append(number_of_sub_samples)
                macroclass_labels_list = []
                macroclass_labels_list.append(subclass_label)
                applyKmeansMasks(satellite_image_input, input_mask_list, "", "", output_masked_image_list, output_centroids_files_list, macroclass_sampling_list, macroclass_labels_list, no_data_value, path_time_log, 200, 1, -1, 0.0, rand_otb, ram_otb, number_of_actives_pixels_threshold, format_raster, extension_raster, save_results_intermediate, overwrite)

                if debug >= 2:
                    print(cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 2/5 : End of the subsampling by unsupervised classification into %s classes, available here %s : " % (idx_class + 1, len(sub_sampling_class_list), number_of_sub_samples, class_subsampled_raster) + endC)

                # STEP 3/5 : ADD THE NEW SUBCLASSES TO THE REALLOCATION TABLE
                # Reopen the proposal table file for rewriting
                for i in range(number_of_sub_samples):
                    class_values_list.append(subclass_label + i)
                    text_new_table += str(subclass_label + i) + ";" + str(subclass_label + i) + "; UPDATE MANUALLY (origin : " + str(class_to_sub_sample) + ")" + "\n"

                # STEP 4/5 : APPLY THE SUBSAMPLING TO THE CLASSIFICATION RESULT
                expression_application_sous_echantillonage = "\"im1b1 == %s? im2b1 : im1b1\"" % (class_to_sub_sample)
                command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" % (image_to_sub_sample, class_subsampled_raster, image_output_temp, CODAGE, expression_application_sous_echantillonage)

                if debug >= 2:
                    print("\n" + cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 4/5 : Start of applying the subsampling from %s to %s" % (idx_class + 1, len(sub_sampling_class_list), class_subsampled_raster, classified_image_input) + endC)
                    print(command)
                os.system(command)
                if debug >= 2:
                    print(cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 4/5 : End of applying the subsampling from %s to %s, output available here : %s" % (idx_class + 1, len(sub_sampling_class_list), class_subsampled_raster, classified_image_input, image_output_temp) + endC)

                # STEP 5/5 : HANDLE RENAMING AND DELETIONS
                if debug >= 2:
                    print("\n" + cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 5/5 : Start of renaming and deleting the intermediate directories" % (idx_class + 1, len(sub_sampling_class_list)) + endC)
                if debug >= 3:
                    print("\n" + green + "classified image input: %s" % (classified_image_input) + endC)
                    print("\n" + green + "image to sub sample: %s" % (image_to_sub_sample) + endC)
                    print("\n" + green + "image temp : %s" % (image_output_temp) + endC)
                    print("\n" + green + "image output : %s" % (image_output) + endC)

                # If the input and output images are the same file, delete the input file so it can be recreated from the reworked file
                if image_output == classified_image_input and os.path.isfile(classified_image_input):
                    removeFile(classified_image_input)
                os.rename(image_output_temp, image_output)
                processing_pass_first = True

                # DELETE THE TEMPORARY FILES
                if not save_results_intermediate:
                    if os.path.isfile(class_mask_raster):
                        removeFile(class_mask_raster)
                    if os.path.isfile(class_subsampled_raster):
                        removeFile(class_subsampled_raster)
                    if os.path.isfile(centroid_file):
                        removeFile(centroid_file)

                if debug >= 2:
                    print(cyan + "classRasterSubSampling() : " + bold + green + "CLASS %s/%s - STEP 5/5 : End of renaming and deleting the intermediate directories" % (idx_class + 1, len(sub_sampling_class_list)) + endC)

            else:  # Case where there are not enough pixels to run the kmeans
                if debug >= 2:
                    print("\n" + cyan + "classRasterSubSampling() : " + bold + yellow + "CLASS %s/%s - STEP 2/5 : Not enough pixels available to apply the kmeans : %s for a minimum of %s required " % (idx_class + 1, len(sub_sampling_class_list), number_of_actives_pixels, int(number_of_sub_samples) * number_of_actives_pixels_threshold) + endC)
                    print(cyan + "classRasterSubSampling() : " + bold + yellow + "CLASS %s/%s - STEP 2/5 : SUBSAMPLING NOT APPLIED TO CLASS %s" % (idx_class + 1, len(sub_sampling_class_list), class_to_sub_sample) + endC + "\n")

                # UPDATE THE image_to_sub_sample FILE
                if idx_class == 0:
                    processing_pass_first = False

                # UPDATE THE REALLOCATION TABLE
                text_new_table += str(class_to_sub_sample) + ";" + str(class_to_sub_sample) + ";CLASS TOO SMALL FOR SUBSAMPLING" + "\n"

                # DELETE THE MASK
                if not save_results_intermediate and os.path.isfile(class_mask_raster):
                    removeFile(class_mask_raster)
    else:
        shutil.copy2(classified_image_input, image_output)  # Copy the input raster if no subsampling is done

    # Write the new table to the file
    writeTextFile(table_reallocation, text_new_table)

    # DELETE THE TEMPORARY DIRECTORY AND FILES
    if not save_results_intermediate and os.path.isdir(os.path.dirname(temp_sub_sampling_path)):
        shutil.rmtree(os.path.dirname(temp_sub_sampling_path))

    print(cyan + "classRasterSubSampling() : " + bold + green + "END\n" + endC)

    # Update the log
    ending_event = "classRasterSubSampling() : Micro class subsampling on classification image ending : "
    timeLine(path_time_log, ending_event)

    return
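# A minimal, hypothetical usage sketch for classRasterSubSampling(); all paths
# are placeholders. The reallocation table is assumed to be the
# semicolon-separated text file read by readReallocationTable(); after the call
# it is rewritten with one "UPDATE MANUALLY" line per new subclass created by
# the K-Means, for the operator to review before the reallocation brick runs.
def _example_classRasterSubSampling():
    classRasterSubSampling(
        satellite_image_input="raw_satellite.tif",   # raw image the K-Means is computed on
        classified_image_input="classif_micro.tif",  # classification whose micro classes are split
        image_output="classif_micro_subsampled.tif",
        table_reallocation="reallocation_table.txt",
        sub_sampling_number=4,
        no_data_value=0,
        path_time_log="time_log.txt",
        rand_otb=1,
        ram_otb=4096)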
def createMacroSamples(image_input, vector_to_cut_input, vector_sample_output, raster_sample_output, bd_vector_input_list, bd_buff_list, sql_expression_list, path_time_log, macro_sample_name="", simplify_vector_param=10.0, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "createMacroSamples() : create macro samples starting : "
    timeLine(path_time_log, starting_event)

    if debug >= 3:
        print(bold + green + "createMacroSamples() : Variables in the function" + endC)
        print(cyan + "createMacroSamples() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_to_cut_input : " + str(vector_to_cut_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_sample_output : " + str(vector_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "raster_sample_output : " + str(raster_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_vector_input_list : " + str(bd_vector_input_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_buff_list : " + str(bd_buff_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "sql_expression_list : " + str(sql_expression_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMacroSamples() : " + endC + "macro_sample_name : " + str(macro_sample_name) + endC)
        print(cyan + "createMacroSamples() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC)
        print(cyan + "createMacroSamples() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "createMacroSamples() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createMacroSamples() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMacroSamples() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    FOLDER_MASK_TEMP = "Mask_"
    FOLDER_CUTTING_TEMP = "Cut_"
    FOLDER_FILTERING_TEMP = "Filter_"
    FOLDER_BUFF_TEMP = "Buff_"

    SUFFIX_MASK_CRUDE = "_crude"
    SUFFIX_MASK = "_mask"
    SUFFIX_VECTOR_CUT = "_cut"
    SUFFIX_VECTOR_FILTER = "_filt"
    SUFFIX_VECTOR_BUFF = "_buff"

    CODAGE = "uint8"

    # STEP 1 : CLEAN THE EXISTING DATA
    print(cyan + "createMacroSamples() : " + bold + green + "Cleaning the workspace..." + endC)

    # Name of the computation directory
    repertory_macrosamples_output = os.path.dirname(vector_sample_output)

    # Test whether the sample vector already exists and whether it must be overwritten
    check = os.path.isfile(vector_sample_output) or os.path.isfile(raster_sample_output)

    if check and not overwrite:
        # The sample files already exist and overwrite is disabled
        print(bold + yellow + "File sample : " + vector_sample_output + " already exists and will not be created again." + endC)
    else:
        if check:
            try:
                removeVectorFile(vector_sample_output)
                removeFile(raster_sample_output)
            except Exception:
                pass  # If the file does not exist, it cannot be deleted: this step is skipped

        # Define the temporary directories
        repertory_mask_temp = repertory_macrosamples_output + os.sep + FOLDER_MASK_TEMP + macro_sample_name
        repertory_samples_cutting_temp = repertory_macrosamples_output + os.sep + FOLDER_CUTTING_TEMP + macro_sample_name
        repertory_samples_filtering_temp = repertory_macrosamples_output + os.sep + FOLDER_FILTERING_TEMP + macro_sample_name
        repertory_samples_buff_temp = repertory_macrosamples_output + os.sep + FOLDER_BUFF_TEMP + macro_sample_name

        if debug >= 4:
            print(cyan + "createMacroSamples() : " + endC + "Creating directory : " + str(repertory_mask_temp))
            print(cyan + "createMacroSamples() : " + endC + "Creating directory : " + str(repertory_samples_cutting_temp))
            print(cyan + "createMacroSamples() : " + endC + "Creating directory : " + str(repertory_samples_buff_temp))

        # Create the temporary directories that do not exist
        if not os.path.isdir(repertory_macrosamples_output):
            os.makedirs(repertory_macrosamples_output)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_filtering_temp):
            os.makedirs(repertory_samples_filtering_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Clean the temporary directories that are not empty
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_filtering_temp)
        cleanTempData(repertory_samples_buff_temp)

        print(cyan + "createMacroSamples() : " + bold + green + "... end of cleaning" + endC)

        # STEP 2 : CUT THE VECTORS
        print(cyan + "createMacroSamples() : " + bold + green + "Cutting the samples ..." + endC)

        if vector_to_cut_input is None:
            # 2.1 : Create the mask delimiting the footprint of the zone, per image
            image_name = os.path.splitext(os.path.basename(image_input))[0]
            vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
            cols, rows, num_band = getGeometryImage(image_input)
            no_data_value = getNodataValueImage(image_input, num_band)
            if no_data_value is None:
                no_data_value = 0
            createVectorMask(image_input, vector_mask, no_data_value, format_vector)

            # 2.2 : Simplify the mask
            vector_simple_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
            simplifyVector(vector_mask, vector_simple_mask, simplify_vector_param, format_vector)
        else:
            vector_simple_mask = vector_to_cut_input

        # 2.3 : Cut the exogenous DB vectors with the mask
        vectors_cut_list = []
        for vector_input in bd_vector_input_list:
            vector_name = os.path.splitext(os.path.basename(vector_input))[0]
            vector_cut = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension_vector
            vectors_cut_list.append(vector_cut)
        cutoutVectors(vector_simple_mask, bd_vector_input_list, vectors_cut_list, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... end of cutting" + endC)

        # STEP 3 : FILTER THE VECTORS
        print(cyan + "createMacroSamples() : " + bold + green + "Filtering the samples ..." + endC)

        vectors_filtered_list = []
        if sql_expression_list != []:
            for idx_vector in range(len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                vector_cut = vectors_cut_list[idx_vector]
                if idx_vector < len(sql_expression_list):
                    sql_expression = sql_expression_list[idx_vector]
                else:
                    sql_expression = ""
                vector_filtered = repertory_samples_filtering_temp + os.sep + vector_name + SUFFIX_VECTOR_FILTER + extension_vector
                vectors_filtered_list.append(vector_filtered)

                # Filtering with ogr2ogr
                if sql_expression != "":
                    names_attribut_list = getAttributeNameList(vector_cut, format_vector)
                    column = "'"
                    for name_attribut in names_attribut_list:
                        column += name_attribut + ", "
                    column = column[0:len(column) - 2]
                    column += "'"
                    ret = filterSelectDataVector(vector_cut, vector_filtered, column, sql_expression, format_vector)
                    if not ret:
                        print(cyan + "createMacroSamples() : " + bold + yellow + "Warning: problem while filtering the DB vectors, the SQL expression %s is incorrect" % (sql_expression) + endC)
                        copyVectorFile(vector_cut, vector_filtered)
                else:
                    print(cyan + "createMacroSamples() : " + bold + yellow + "No filtering for the file named : " + endC + vector_filtered)
                    copyVectorFile(vector_cut, vector_filtered)
        else:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No filtering requested" + endC)
            for idx_vector in range(len(bd_vector_input_list)):
                vector_cut = vectors_cut_list[idx_vector]
                vectors_filtered_list.append(vector_cut)

        print(cyan + "createMacroSamples() : " + bold + green + "... end of filtering" + endC)

        # STEP 4 : BUFFER THE VECTORS
        print(cyan + "createMacroSamples() : " + bold + green + "Applying the buffers..." + endC)

        vectors_buffered_list = []
        if bd_buff_list != []:
            # Loop over the input vectors
            for idx_vector in range(len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                buff = bd_buff_list[idx_vector]
                vector_filtered = vectors_filtered_list[idx_vector]
                vector_buffered = repertory_samples_buff_temp + os.sep + vector_name + SUFFIX_VECTOR_BUFF + extension_vector

                if buff != 0:
                    if os.path.isfile(vector_filtered):
                        if debug >= 3:
                            print(cyan + "createMacroSamples() : " + endC + "vector_filtered : " + str(vector_filtered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "vector_buffered : " + str(vector_buffered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "buff : " + str(buff) + endC)
                        bufferVector(vector_filtered, vector_buffered, buff, "", 1.0, 10, format_vector)
                    else:
                        print(cyan + "createMacroSamples() : " + bold + yellow + "No file named : " + endC + vector_filtered)
                else:
                    print(cyan + "createMacroSamples() : " + bold + yellow + "No buffer for the file named : " + endC + vector_filtered)
                    copyVectorFile(vector_filtered, vector_buffered)

                vectors_buffered_list.append(vector_buffered)
        else:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No buffer requested" + endC)
            for idx_vector in range(len(bd_vector_input_list)):
                vector_filtered = vectors_filtered_list[idx_vector]
                vectors_buffered_list.append(vector_filtered)

        print(cyan + "createMacroSamples() : " + bold + green + "... end of buffering" + endC)

        # STEP 5 : MERGE THE SHAPEFILES
        print(cyan + "createMacroSamples() : " + bold + green + "Merging by macroclass ..." + endC)

        # If there is no list of shapefiles to merge
        if not vectors_buffered_list:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No merge without data to merge" + endC)
        # If there is only one input shapefile
        elif len(vectors_buffered_list) == 1:
            print(cyan + "createMacroSamples() : " + bold + yellow + "No merge for a single input file" + endC)
            copyVectorFile(vectors_buffered_list[0], vector_sample_output)
        else:
            # Merge the shapefiles
            vectors_buffered_controled_list = []
            for vector_buffered in vectors_buffered_list:
                if os.path.isfile(vector_buffered) and (getGeometryType(vector_buffered, format_vector) in ('POLYGON', 'MULTIPOLYGON')) and (getNumberFeature(vector_buffered, format_vector) > 0):
                    vectors_buffered_controled_list.append(vector_buffered)
                else:
                    print(cyan + "createMacroSamples() : " + bold + red + "Warning: the buffered file is empty, it will not be merged : " + endC + vector_buffered, file=sys.stderr)

            fusionVectors(vectors_buffered_controled_list, vector_sample_output, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... end of merging" + endC)

        # STEP 6 : CREATE THE OUTPUT RASTER FILE IF REQUESTED
        # Create a binary mask
        if raster_sample_output != "" and image_input != "":
            repertory_output = os.path.dirname(raster_sample_output)
            if not os.path.isdir(repertory_output):
                os.makedirs(repertory_output)
            rasterizeBinaryVector(vector_sample_output, image_input, raster_sample_output, 1, CODAGE)

        # STEP 7 : DELETE USELESS INTERMEDIATE FILES
        # Delete intermediate data
        if not save_results_intermediate:
            # Delete the cutting file if it was created
            if vector_simple_mask != vector_to_cut_input:
                if os.path.isfile(vector_simple_mask):
                    removeVectorFile(vector_simple_mask)

            # Delete the temporary directories
            deleteDir(repertory_mask_temp)
            deleteDir(repertory_samples_cutting_temp)
            deleteDir(repertory_samples_filtering_temp)
            deleteDir(repertory_samples_buff_temp)

    # Update the log
    ending_event = "createMacroSamples() : create macro samples ending : "
    timeLine(path_time_log, ending_event)

    return
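# A minimal, hypothetical usage sketch for createMacroSamples(); paths and the
# SQL expressions are placeholders. The three lists are assumed parallel: one
# buffer size and one optional SQL filter per input DB vector.
def _example_createMacroSamples():
    createMacroSamples(
        image_input="ortho_zone.tif",
        vector_to_cut_input=None,          # None: the cut mask is derived from the image footprint
        vector_sample_output="samples_bati.shp",
        raster_sample_output="samples_bati.tif",
        bd_vector_input_list=["bd_topo_bati.shp", "bd_topo_routes.shp"],
        bd_buff_list=[0, 5.0],             # buffer per input vector, in map units (0 = no buffer)
        sql_expression_list=["HAUTEUR > 0", ""],
        path_time_log="time_log.txt",
        macro_sample_name="bati")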
def statisticsVectorRaster(image_input, vector_input, vector_output, band_number, enable_stats_all_count, enable_stats_columns_str, enable_stats_columns_real, col_to_delete_list, col_to_add_list, class_label_dico, path_time_log, clean_small_polygons=False, format_vector='ESRI Shapefile', save_results_intermediate=False, overwrite=True):

    # INITIALIZATION
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "vector_input : " + str(vector_input) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "vector_output : " + str(vector_output) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "band_number : " + str(band_number) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "enable_stats_all_count : " + str(enable_stats_all_count) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "enable_stats_columns_str : " + str(enable_stats_columns_str) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "enable_stats_columns_real : " + str(enable_stats_columns_real) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "col_to_delete_list : " + str(col_to_delete_list) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "col_to_add_list : " + str(col_to_add_list) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "clean_small_polygons : " + str(clean_small_polygons) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "statisticsVectorRaster() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    PREFIX_AREA_COLUMN = "S_"

    # Update the log
    starting_event = "statisticsVectorRaster() : Compute statistic crossing starting : "
    timeLine(path_time_log, starting_event)

    # Create the output vector file
    if vector_output == "":
        vector_output = vector_input  # Set only for display purposes
    else:
        # Copy to vector_output
        copyVectorFile(vector_input, vector_output, format_vector)

    # Checks
    image_xmin, image_xmax, image_ymin, image_ymax = getEmpriseImage(image_input)
    vector_xmin, vector_xmax, vector_ymin, vector_ymax = getEmpriseFile(vector_output, format_vector)
    extension_vector = os.path.splitext(vector_output)[1]

    if round(vector_xmin, 4) < round(image_xmin, 4) or round(vector_xmax, 4) > round(image_xmax, 4) or round(vector_ymin, 4) < round(image_ymin, 4) or round(vector_ymax, 4) > round(image_ymax, 4):
        print(cyan + "statisticsVectorRaster() : " + bold + red + "image_xmin, image_xmax, image_ymin, image_ymax" + endC, image_xmin, image_xmax, image_ymin, image_ymax, file=sys.stderr)
        print(cyan + "statisticsVectorRaster() : " + bold + red + "vector_xmin, vector_xmax, vector_ymin, vector_ymax" + endC, vector_xmin, vector_xmax, vector_ymin, vector_ymax, file=sys.stderr)
        raise NameError(cyan + "statisticsVectorRaster() : " + bold + red + "The extent of the vector file (%s) is greater than the image file (%s)" % (vector_output, image_input) + endC)

    pixel_size = getPixelSizeImage(image_input)

    # Remove the very small polygons that introduce NaN values
    if clean_small_polygons:
        min_size_area = pixel_size * 2
        vector_temp = os.path.splitext(vector_output)[0] + "_temp" + extension_vector

        cleanMiniAreaPolygons(vector_output, vector_temp, min_size_area, '', format_vector)
        removeVectorFile(vector_output, format_vector)
        renameVectorFile(vector_temp, vector_output)

    # Get the driver for the shapefile format
    driver = ogr.GetDriverByName(format_vector)
    # Open the shapefile in read-write mode
    data_source = driver.Open(vector_output, 1)  # 0 means read-only - 1 means writeable.
    if data_source is None:
        print(cyan + "statisticsVectorRaster() : " + bold + red + "Cannot open the shapefile : " + vector_output + endC, file=sys.stderr)
        sys.exit(1)  # exit with an error code

    # Get the vector layer
    layer = data_source.GetLayer(0)  # Get the layer (a layer contains the polygons)
    layer_definition = layer.GetLayerDefn()  # GetLayerDefn => returns the field names of the user defined (created) fields

    # STEP 1/4 : AUTOMATICALLY CREATE THE VALUE DICTIONARY IF IT DOES NOT EXIST
    if enable_stats_all_count and class_label_dico == {}:
        image_values_list = identifyPixelValues(image_input)
        # For every value
        for id_value in image_values_list:
            class_label_dico[id_value] = str(id_value)
        # Remove the no-data value 0
        if 0 in class_label_dico:
            del class_label_dico[0]
        if debug >= 2:
            print(class_label_dico)

    # STEP 2/4 : CREATE THE COLUMNS IN THE SHAPEFILE
    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green + "STEP 1/3 : START OF CREATING THE COLUMNS IN THE VECTOR FILE %s" % (vector_output) + endC)

    # Input:
    # col_to_add_list = [UniqueID, majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] - "all" expands class_label_dico into as many columns
    # Sub-lists of col_to_add_list, identified to ease later manipulations:
    # col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range]
    # col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S]

    # Build the intermediate lists
    col_to_add_inter01_list = []

    # Values to write into columns - String format
    if enable_stats_columns_str:
        stats_columns_str_list = ['majority', 'minority']
        for e in stats_columns_str_list:
            col_to_add_list.append(e)

    # Values to write into columns - Number format
    if enable_stats_columns_real:
        stats_columns_real_list = ['min', 'max', 'mean', 'median', 'sum', 'std', 'unique', 'range']
        for e in stats_columns_real_list:
            col_to_add_list.append(e)

    # Values to write into columns - Number format
    if enable_stats_all_count:
        stats_all_count_list = ['all', 'count']
        for e in stats_all_count_list:
            col_to_add_list.append(e)

    # Values to write into columns - if class_label_dico is not empty
    if class_label_dico != {}:
        stats_all_count_list = ['all', 'count']
        for e in stats_all_count_list:
            if not e in col_to_add_list:
                col_to_add_list.append(e)

    # Add column by column
    if "majority" in col_to_add_list:
        col_to_add_inter01_list.append("majority")
    if "DateMaj" in col_to_add_list:
        col_to_add_inter01_list.append("DateMaj")
    if "SrcMaj" in col_to_add_list:
        col_to_add_inter01_list.append("SrcMaj")
    if "minority" in col_to_add_list:
        col_to_add_inter01_list.append("minority")
    if "min" in col_to_add_list:
        col_to_add_inter01_list.append("min")
    if "max" in col_to_add_list:
        col_to_add_inter01_list.append("max")
    if "mean" in col_to_add_list:
        col_to_add_inter01_list.append("mean")
    if "median" in col_to_add_list:
        col_to_add_inter01_list.append("median")
    if "sum" in col_to_add_list:
        col_to_add_inter01_list.append("sum")
    if "std" in col_to_add_list:
        col_to_add_inter01_list.append("std")
    if "unique" in col_to_add_list:
        col_to_add_inter01_list.append("unique")
    if "range" in col_to_add_list:
        col_to_add_inter01_list.append("range")

    # Copy col_to_add_inter01_list into col_to_add_inter02_list
    col_to_add_inter02_list = list(col_to_add_inter01_list)

    if "all" in col_to_add_list:
        col_to_add_inter02_list.append("all")
    if "count" in col_to_add_list:
        col_to_add_inter02_list.append("count")
    if "all_S" in col_to_add_list:
        col_to_add_inter02_list.append("all_S")
    if "count_S" in col_to_add_list:
        col_to_add_inter02_list.append("count_S")
    if "DateMaj" in col_to_add_inter02_list:
        col_to_add_inter02_list.remove("DateMaj")
        col_to_add_inter02_list.insert(0, "majority")
    if "SrcMaj" in col_to_add_inter02_list:
        col_to_add_inter02_list.remove("SrcMaj")
        col_to_add_inter02_list.insert(0, "majority")

    # Values to write into columns - Number format
    if enable_stats_all_count:
        stats_all_count_list = ['all_S', 'count_S']
        for e in stats_all_count_list:
            col_to_add_list.append(e)

    # Create the unique identifier column
    if ("UniqueID" in col_to_add_list) or ("uniqueID" in col_to_add_list) or ("ID" in col_to_add_list):
        field_defn = ogr.FieldDefn("ID", ogr.OFTInteger)  # Create the field name in the field definition object
        layer.CreateField(field_defn)
        if debug >= 3:
            print(cyan + "statisticsVectorRaster() : " + endC + "Creating column : ID")

    # Create the columns of col_to_add_inter01_list ([majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range])
    for col in col_to_add_list:
        if layer_definition.GetFieldIndex(col) == -1:  # Check the existence of column col (-1 = it does not exist)
            if col == 'majority' or col == 'DateMaj' or col == 'SrcMaj' or col == 'minority':  # Columns filled as strings
                stat_classif_field_defn = ogr.FieldDefn(col, ogr.OFTString)  # Create the (string) field in the stat_classif_field_defn object
                layer.CreateField(stat_classif_field_defn)
            elif col == 'mean' or col == 'median' or col == 'sum' or col == 'std' or col == 'unique' or col == 'range' or col == 'max' or col == 'min':
                stat_classif_field_defn = ogr.FieldDefn(col, ogr.OFTReal)  # Create the (real) field in the stat_classif_field_defn object
                # Field width
                stat_classif_field_defn.SetWidth(20)
                # Floating point precision of the field
                stat_classif_field_defn.SetPrecision(2)
                layer.CreateField(stat_classif_field_defn)
            if debug >= 3:
                print(cyan + "statisticsVectorRaster() : " + endC + "Creating column : " + str(col))

    # Create the columns tied to the dictionary
    if ('all' in col_to_add_list) or ('count' in col_to_add_list) or ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
        for col in class_label_dico:

            # Name of the column for the class
            name_col = class_label_dico[col]
            if len(name_col) > 10:
                name_col = name_col[:10]
                print(cyan + "statisticsVectorRaster() : " + bold + yellow + "Column name too long. It will be truncated to 10 characters if used: " + endC + name_col)

            # Name of the column for the class area
            name_col_area = PREFIX_AREA_COLUMN + name_col
            if len(name_col_area) > 10:
                name_col_area = name_col_area[:10]
                if debug >= 3:
                    print(cyan + "statisticsVectorRaster() : " + bold + yellow + "Column name too long. It will be truncated to 10 characters if used: " + endC + name_col_area)

            # Add the raster element distribution (%) columns
            if ('all' in col_to_add_list) or ('count' in col_to_add_list):
                if layer_definition.GetFieldIndex(name_col) == -1:  # Check the existence of column name_col (-1 = it does not exist)
                    stat_classif_field_defn = ogr.FieldDefn(name_col, ogr.OFTReal)  # Create the (real) field in the stat_classif_field_defn object
                    # Field width
                    stat_classif_field_defn.SetWidth(20)
                    # Floating point precision of the field
                    stat_classif_field_defn.SetPrecision(2)
                    if debug >= 3:
                        print(cyan + "statisticsVectorRaster() : " + endC + "Creating column : " + str(name_col))
                    layer.CreateField(stat_classif_field_defn)  # Add the field

            # Add the raster element area columns
            if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list):
                if layer_definition.GetFieldIndex(name_col_area) == -1:  # Check the existence of column name_col_area (-1 = it does not exist)
                    stat_classif_field_defn = ogr.FieldDefn(name_col_area, ogr.OFTReal)  # Create the field name in the stat_classif_field_defn object
                    # Field width
                    stat_classif_field_defn.SetWidth(20)
                    # Floating point precision of the field
                    stat_classif_field_defn.SetPrecision(2)
                    if debug >= 3:
                        print(cyan + "statisticsVectorRaster() : " + endC + "Creating column : " + str(name_col_area))
                    layer.CreateField(stat_classif_field_defn)  # Add the field

    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green + "STEP 1/3 : END OF CREATING THE COLUMNS IN THE VECTOR FILE %s" % (vector_output) + endC)

    # STEP 3/4 : FILL THE COLUMNS OF THE VECTOR
    if debug >= 2:
        print(cyan + "statisticsVectorRaster() : " + bold + green + "STEP 2/3 : START OF FILLING THE COLUMNS OF THE VECTOR " + endC)

    # Compute the statistics col_to_add_inter02_list = [majority, minority, min, max, mean, median, sum, std, unique, range, all, count, all_S, count_S] crossing raster image / vector
    # Using the rasterstats library
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + bold + green + "Computing statistics " + endC + "Stats : %s - Vector : %s - Raster : %s" % (col_to_add_inter02_list, vector_output, image_input) + endC)
    stats_info_list = raster_stats(vector_output, image_input, band_num=band_number, stats=col_to_add_inter02_list)

    # Count the polygons
    num_features = layer.GetFeatureCount()
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + bold + green + "Filling the columns polygon by polygon " + endC)
    if debug >= 3:
        print(cyan + "statisticsVectorRaster() : " + endC + "Total number of polygons : " + str(num_features))

    polygone_count = 0
    for polygone_stats in stats_info_list:  # For each polygon in stats_info_list - there are as many polygons as in the vector file

        # Extract the feature
        feature = layer.GetFeature(polygone_stats['__fid__'])

        polygone_count = polygone_count + 1

        if debug >= 3 and polygone_count % 10000 == 0:
            print(cyan + "statisticsVectorRaster()
: " + endC + "Avancement : %s polygones traites sur %s" % (polygone_count, num_features)) if debug >= 5: print( cyan + "statisticsVectorRaster() : " + endC + "Traitement du polygone : ", stats_info_list.index(polygone_stats) + 1) # Remplissage de l'identifiant unique if ("UniqueID" in col_to_add_list) or ( "uniqueID" in col_to_add_list) or ("ID" in col_to_add_list): feature.SetField('ID', int(stats_info_list.index(polygone_stats))) # Initialisation à 0 des colonnes contenant le % de répartition de la classe - Verifier ce qu'il se passe si le nom dépasse 10 caracteres if ('all' in col_to_add_list) or ('count' in col_to_add_list): for element in class_label_dico: name_col = class_label_dico[element] if len(name_col) > 10: name_col = name_col[:10] feature.SetField(name_col, 0) # Initialisation à 0 des colonnes contenant la surface correspondant à la classe - Verifier ce qu'il se passe si le nom dépasse 10 caracteres if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list): for element in class_label_dico: name_col = class_label_dico[element] name_col_area = PREFIX_AREA_COLUMN + name_col if len(name_col_area) > 10: name_col_area = name_col_area[:10] feature.SetField(name_col_area, 0) # Remplissage des colonnes contenant le % de répartition et la surface des classes if ('all' in col_to_add_list) or ('count' in col_to_add_list) or ( 'all_S' in col_to_add_list) or ('count_S' in col_to_add_list): # 'all' est une liste des couples : (Valeur_du_pixel_sur_le_raster, Nbr_pixel_ayant_cette_valeur) pour le polygone observe. # Ex : [(0,183),(803,45),(801,4)] : dans le polygone, il y a 183 pixels de valeur 0, 45 pixels de valeur 803 et 4 pixels de valeur 801 majority_all = polygone_stats['all'] # Deux valeurs de pixel peuvent faire référence à une même colonne. 
Par exemple : les pixels à 201, 202, 203 peuvent correspondre à la BD Topo # Regroupement des éléments de majority_all allant dans la même colonne au regard de class_label_dico count_for_idx_couple = 0 # Comptage du nombre de modifications (suppression de couple) de majority_all pour adapter la valeur de l'index lors de son parcours for idx_couple in range( 1, len(majority_all) ): # Inutile d'appliquer le traitement au premier élément (idx_couple == 0) idx_couple = idx_couple - count_for_idx_couple # Prise en compte dans le parcours de majority_all des couples supprimés couple = majority_all[idx_couple] # Ex : couple = (803,45) if (couple is None) or ( couple == "" ): # en cas de bug de rasterstats (erreur geometrique du polygone par exemple) if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + red + "Probleme detecte dans la gestion du polygone %s" % (polygone_count) + endC, file=sys.stderr) pass else: for idx_verif in range(idx_couple): # Vérification au regard des éléments présents en amont dans majority_all # Cas où le nom correspondant au label a déjà été rencontré dans majority_all # Vérification que les pixels de l'image sont réferncés dans le dico if couple[0] in class_label_dico: if class_label_dico[couple[0]] == class_label_dico[ majority_all[idx_verif][0]]: majority_all[idx_verif] = ( majority_all[idx_verif][0], majority_all[idx_verif][1] + couple[1] ) # Ajout du nombre de pixels correspondant dans le couple précédent majority_all.remove( couple ) # Supression du couple présentant le "doublon" count_for_idx_couple = count_for_idx_couple + 1 # Mise à jour du décompte de modifications break else: raise NameError( cyan + "statisticsVectorRaster() : " + bold + red + "The image file (%s) contain pixel value '%d' not identified into class_label_dico" % (image_input, couple[0]) + endC) # Intégration des valeurs de majority all dans les colonnes for couple_value_count in majority_all: # Parcours de majority_all. Ex : couple_value_count = (803,45) if (couple_value_count is None) or ( couple_value_count == "" ): # en cas de bug de rasterstats (erreur geometrique du polygone par exemple) if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + red + "Probleme detecte dans la gestion du polygone %s" % (polygone_count) + endC, file=sys.stderr) pass else: nb_pixel_total = polygone_stats[ 'count'] # Nbr de pixels du polygone pixel_value = couple_value_count[0] # Valeur du pixel value_count = couple_value_count[ 1] # Nbr de pixels ayant cette valeur name_col = class_label_dico[ pixel_value] # Transformation de la valeur du pixel en "signification" au regard du dictionnaire. 
Ex : BD Topo ou 2011 name_col_area = PREFIX_AREA_COLUMN + name_col # Identification du nom de la colonne en surfaces if len(name_col) > 10: name_col = name_col[:10] if len(name_col_area) > 10: name_col_area = name_col_area[:10] value_area = pixel_size * value_count # Calcul de la surface du polygone correspondant à la valeur du pixel if nb_pixel_total != None and nb_pixel_total != 0: percentage = ( float(value_count) / float(nb_pixel_total) ) * 100 # Conversion de la surface en pourcentages, arondi au pourcent else: if debug >= 3: print( cyan + "statisticsVectorRaster() : " + bold + red + "Probleme dans l'identification du nombre de pixels du polygone %s : le pourcentage de %s est mis à 0" % (polygone_count, name_col) + endC, file=sys.stderr) percentage = 0.0 if ('all' in col_to_add_list) or ('count' in col_to_add_list): feature.SetField( name_col, percentage ) # Injection du pourcentage dans la colonne correpondante if ('all_S' in col_to_add_list) or ('count_S' in col_to_add_list): feature.SetField( name_col_area, value_area ) # Injection de la surface dans la colonne correpondante else: pass # Remplissage des colonnes statistiques demandées ( col_to_add_inter01_list = [majority/DateMaj/SrcMaj, minority, min, max, mean, median, sum, std, unique, range] ) for stats in col_to_add_inter01_list: if stats == 'DateMaj' or stats == 'SrcMaj': # Cas particulier de 'DateMaj' et 'SrcMaj' : le nom de la colonne est DateMaj ou SrcMaj, mais la statistique utilisée est identifiée par majority name_col = stats # Nom de la colonne. Ex : 'DateMaj' value_statis = polygone_stats[ 'majority'] # Valeur majoritaire. Ex : '203' if value_statis == None: value_statis_class = 'nan' else: value_statis_class = class_label_dico[ value_statis] # Transformation de la valeur au regard du dictionnaire. Ex : '2011' feature.SetField(name_col, value_statis_class) # Ajout dans la colonne elif (stats is None) or (stats == "") or ( polygone_stats[stats] is None) or (polygone_stats[stats]) == "" or ( polygone_stats[stats]) == 'nan': # En cas de bug de rasterstats (erreur geometrique du polygone par exemple) pass else: name_col = stats # Nom de la colonne. 
Ex : 'majority', 'max' value_statis = polygone_stats[ stats] # Valeur à associer à la colonne, par exemple '2011' if ( name_col == 'majority' or name_col == 'minority' ) and class_label_dico != []: # Cas où la colonne fait référence à une valeur du dictionnaire value_statis_class = class_label_dico[value_statis] else: value_statis_class = value_statis feature.SetField(name_col, value_statis_class) layer.SetFeature(feature) feature.Destroy() if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 2/3 : FIN DU REMPLISSAGE DES COLONNES DU VECTEUR %s" % (vector_output) + endC) # ETAPE 4/4 : SUPRESSION DES COLONNES NON SOUHAITEES if col_to_delete_list != []: if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 3/3 : DEBUT DES SUPPRESSIONS DES COLONNES %s" % (col_to_delete_list) + endC) for col_to_delete in col_to_delete_list: if layer_definition.GetFieldIndex( col_to_delete ) != -1: # Vérification de l'existence de la colonne col (retour = -1 : elle n'existe pas) layer.DeleteField(layer_definition.GetFieldIndex( col_to_delete)) # Suppression de la colonne if debug >= 3: print(cyan + "statisticsVectorRaster() : " + endC + "Suppression de %s" % (col_to_delete) + endC) if debug >= 2: print(cyan + "statisticsVectorRaster() : " + bold + green + "ETAPE 3/3 : FIN DE LA SUPPRESSION DES COLONNES" + endC) else: print(cyan + "statisticsVectorRaster() : " + bold + yellow + "ETAPE 3/3 : AUCUNE SUPPRESSION DE COLONNE DEMANDEE" + endC) # Fermeture du fichier shape layer.SyncToDisk() layer = None data_source.Destroy() # Mise à jour du Log ending_event = "statisticsVectorRaster() : Compute statistic crossing ending : " timeLine(path_time_log, ending_event) return
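# --- Illustrative usage sketch (not part of the original module) ---
# A minimal call to statisticsVectorRaster() that adds per-class percentage and
# area columns to a segmentation vector. All paths and the class/label dictionary
# below are hypothetical placeholders; adapt them to a real dataset before running.
# Note that raster_stats() used above is the historical rasterstats entry point;
# recent releases of the library expose zonal_stats() instead, so the installed
# version may require adapting that call.
def demoStatisticsVectorRaster():
    class_label_dico = {1: "Bati", 2: "Route", 3: "Vegetation"}  # hypothetical pixel values and labels
    statisticsVectorRaster(
        "classif.tif",           # hypothetical classified raster
        "segments.shp",          # hypothetical input polygons
        "segments_stats.shp",    # output copy that receives the new columns
        1,                       # band_number
        True,                    # enable_stats_all_count: adds the 'all'/'count' (and area) columns
        False,                   # enable_stats_columns_str: skip 'majority'/'minority'
        False,                   # enable_stats_columns_real: skip min/max/mean/median/sum/std/unique/range
        [],                      # col_to_delete_list: keep every column
        [],                      # col_to_add_list: filled from the enable_* flags above
        class_label_dico,
        "log.txt")               # path_time_log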
def createMask(image_input, vector_samples_input, image_masked, path_time_log, save_results_intermediate=False, overwrite=True):

    # Update the log
    starting_event = "createMask() : Masks creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : MASQUES CREATION" + endC)
    print(endC)

    CODAGE = "uint8"

    if debug >= 2:
        print(bold + green + "createMask() : Variables dans la fonction" + endC)
        print(cyan + "createMask() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "createMask() : " + endC + "vector_samples_input : " + str(vector_samples_input) + endC)
        print(cyan + "createMask() : " + endC + "image_masked : " + str(image_masked) + endC)
        print(cyan + "createMask() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMask() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMask() : " + endC + "overwrite : " + str(overwrite) + endC)

    # RASTERIZATION OF THE TRAINING VECTORS

    # CHECK WHETHER THE OUTPUT MASK ALREADY EXISTS
    # If an output file with the same name already exists and the overwrite option is off, skip to the next mask
    check = os.path.isfile(image_masked)
    if check and not overwrite:
        print(bold + yellow + "createMask() : " + endC + "Computing mask from %s with %s already done : no actualisation" % (image_input, vector_samples_input) + endC)
    # Otherwise, or if overwrite is enabled, compute it
    else:
        if check:
            try:  # Removal of any existing file
                removeFile(image_masked)
            except Exception:
                pass  # If the file cannot be removed, assume it does not exist and continue

        # MASK EXTRACTION
        print(bold + green + "createMask() : " + endC + "Computing mask from %s with %s " % (image_input, vector_samples_input) + endC)

        rasterizeBinaryVector(vector_samples_input, image_input, image_masked, 1, CODAGE)

        print(bold + green + "createMask() : " + endC + "Computing mask from %s with %s completed" % (image_input, vector_samples_input) + endC)

    print(endC)
    print(bold + green + "## END : MASQUES CREATION" + endC)
    print(endC)

    # Update the log
    ending_event = "createMask() : Masks creation ending : "
    timeLine(path_time_log, ending_event)

    return
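# --- Illustrative usage sketch (not part of the original module) ---
# createMask() burns the training polygons into a uint8 raster aligned on the
# reference image (1 inside the polygons via rasterizeBinaryVector, 0 elsewhere).
# The paths below are hypothetical placeholders.
def demoCreateMask():
    createMask(
        "ortho.tif",         # hypothetical reference image providing the grid and extent
        "samples.shp",       # hypothetical training polygons to rasterize
        "samples_mask.tif",  # output binary mask
        "log.txt")           # path_time_log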