def addDataBaseExo(image_input, image_classif_add_output, class_file_dico, class_buffer_dico, class_sql_dico, path_time_log, format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True, simplifie_param=10.0):

    # Update the log
    starting_event = "addDataBaseExo() : Add data base exogene to classification starting : "
    timeLine(path_time_log, starting_event)

    # Print
    if debug >= 3:
        print(bold + green + "Variables dans la fonction" + endC)
        print(cyan + "addDataBaseExo() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "image_classif_add_output : " + str(image_classif_add_output) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "class_file_dico : " + str(class_file_dico) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "class_buffer_dico : " + str(class_buffer_dico) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "class_sql_dico : " + str(class_sql_dico) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    FOLDER_MASK_TEMP = 'Mask_'
    FOLDER_FILTERING_TEMP = 'Filt_'
    FOLDER_CUTTING_TEMP = 'Cut_'
    FOLDER_BUFF_TEMP = 'Buff_'

    SUFFIX_MASK_CRUDE = '_mcrude'
    SUFFIX_MASK = '_mask'
    SUFFIX_FUSION = '_info'
    SUFFIX_VECTOR_FILTER = "_filt"
    SUFFIX_VECTOR_CUT = '_decoup'
    SUFFIX_VECTOR_BUFF = '_buff'

    CODAGE = "uint16"

    # STEP 1 : CLEAN UP EXISTING DATA

    if debug >= 2:
        print(cyan + "addDataBaseExo() : " + bold + green + "NETTOYAGE ESPACE DE TRAVAIL..." + endC)

    # Base name of the image
    image_name = os.path.splitext(os.path.basename(image_input))[0]

    # Clean up old result data:
    # if the result file already exists and overwrite is not enabled, do nothing
    check = os.path.isfile(image_classif_add_output)
    if check and not overwrite:
        print(bold + yellow + "addDataBaseExo() : " + endC + image_classif_add_output + " has already added bd exo and will not be added again." + endC)
    else:
        if check:
            try:
                removeFile(image_classif_add_output)  # Try to delete the file
            except Exception:
                pass  # If the file cannot be deleted, assume it does not exist and carry on

        # Temporary directories
        repertory_output = os.path.dirname(image_classif_add_output)
        repertory_mask_temp = repertory_output + os.sep + FOLDER_MASK_TEMP + image_name
        repertory_samples_filtering_temp = repertory_output + os.sep + FOLDER_FILTERING_TEMP + image_name
        repertory_samples_cutting_temp = repertory_output + os.sep + FOLDER_CUTTING_TEMP + image_name
        repertory_samples_buff_temp = repertory_output + os.sep + FOLDER_BUFF_TEMP + image_name

        if debug >= 4:
            print(repertory_mask_temp)
            print(repertory_samples_filtering_temp)
            print(repertory_samples_cutting_temp)
            print(repertory_samples_buff_temp)

        # Create the temporary directories if they do not exist
        if not os.path.isdir(repertory_output):
            os.makedirs(repertory_output)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_filtering_temp):
            os.makedirs(repertory_samples_filtering_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Clean the temporary directories if they are not empty
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_filtering_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_buff_temp)

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "... FIN NETTOYAGE" + endC)

        # STEP 2 : CREATE A CUTTING SHAPE

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "SHAPE DE DECOUPE..." + endC)

        # 2.1 : Create the mask delimiting the footprint of the image
        vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
        createVectorMask(image_input, vector_mask)

        # 2.2 : Simplify the global mask
        vector_simple_mask_cut = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
        simplifyVector(vector_mask, vector_simple_mask_cut, simplifie_param, format_vector)

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "...FIN SHAPE DE DECOUPEE" + endC)

        # STEP 3 : CUT AND BUFFER THE VECTORS, THEN MERGE THEM

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "MISE EN PLACE DES TAMPONS..." + endC)

        image_combined_list = []
        # Walk the dictionary mapping macroclasses to file names
        for macroclass_label in class_file_dico:
            vector_fusion_list = []
            for index_info in range(len(class_file_dico[macroclass_label])):
                input_vector = class_file_dico[macroclass_label][index_info]
                vector_name = os.path.splitext(os.path.basename(input_vector))[0]
                output_vector_filtered = repertory_samples_filtering_temp + os.sep + vector_name + SUFFIX_VECTOR_FILTER + extension_vector
                output_vector_cut = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension_vector
                output_vector_buff = repertory_samples_buff_temp + os.sep + vector_name + SUFFIX_VECTOR_BUFF + extension_vector
                sql_expression = class_sql_dico[macroclass_label][index_info]
                buffer_str = class_buffer_dico[macroclass_label][index_info]
                buff = 0.0
                col_name_buf = ""
                try:
                    buff = float(buffer_str)
                except (ValueError, TypeError):
                    col_name_buf = buffer_str
                    print(cyan + "addDataBaseExo() : " + bold + green + "Pas de valeur buffer mais un nom de colonne pour les valeur à bufferiser : " + endC + col_name_buf)

                if os.path.isfile(input_vector):
                    if debug >= 3:
                        print(cyan + "addDataBaseExo() : " + endC + "input_vector : " + str(input_vector) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "output_vector_filtered : " + str(output_vector_filtered) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "output_vector_cut : " + str(output_vector_cut) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "output_vector_buff : " + str(output_vector_buff) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "buff : " + str(buff) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "sql : " + str(sql_expression) + endC)

                    # 3.0 : Take the input vectors and filter them with the SQL query, via ogr2ogr
                    if sql_expression != "":
                        names_attribut_list = getAttributeNameList(input_vector, format_vector)
                        column = "'"
                        for name_attribut in names_attribut_list:
                            column += name_attribut + ", "
                        column = column[0:len(column) - 2]
                        column += "'"
                        ret = filterSelectDataVector(input_vector, output_vector_filtered, column, sql_expression, format_vector)
                        if not ret:
                            print(cyan + "addDataBaseExo() : " + bold + yellow + "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte" % (sql_expression) + endC)
                            output_vector_filtered = input_vector
                    else:
                        print(cyan + "addDataBaseExo() : " + bold + green + "Pas de filtrage sur le fichier du nom : " + endC + output_vector_filtered)
                        output_vector_filtered = input_vector

                    # 3.1 : Cut the vector to the footprint of the input image
                    cutoutVectors(vector_simple_mask_cut, [output_vector_filtered], [output_vector_cut], format_vector)

                    # 3.2 : Buffer the cut vectors, either with the value given in the dico, or with
                    #       per-feature values read from the vector itself when the dico holds a column name
                    if os.path.isfile(output_vector_cut) and ((buff != 0) or (col_name_buf != "")):
                        bufferVector(output_vector_cut, output_vector_buff, buff, col_name_buf, 1.0, 10, format_vector)
                    else:
                        print(cyan + "addDataBaseExo() : " + bold + green + "Pas de buffer sur le fichier du nom : " + endC + output_vector_cut)
                        output_vector_buff = output_vector_cut

                    # 3.3 : If a result shape exists, add it to the fusion list
                    if os.path.isfile(output_vector_buff):
                        vector_fusion_list.append(output_vector_buff)
                        if debug >= 3:
                            print("file for fusion : " + output_vector_buff)
                    else:
                        print(bold + yellow + "pas de fichiers avec ce nom : " + endC + output_vector_buff)
                else:
                    print(cyan + "addDataBaseExo() : " + bold + yellow + "Pas de fichier du nom : " + endC + input_vector)

            # 3.4 : Merge the transformed shapes of one class, then rasterize and label the vectors,
            #       provided there is at least one shapefile to merge
            if not vector_fusion_list:
                print(bold + yellow + "Pas de fusion sans donnee a fusionnee" + endC)
            else:
                # Rasterization and BandMath of the shapefiles
                raster_list = []
                for vector in vector_fusion_list:
                    if debug >= 3:
                        print(cyan + "addDataBaseExo() : " + endC + "Rasterization : " + vector + " label : " + macroclass_label)
                    raster_output = os.path.splitext(vector)[0] + extension_raster

                    # Rasterization
                    rasterizeBinaryVector(vector, image_input, raster_output, macroclass_label, CODAGE)
                    raster_list.append(raster_output)

                if debug >= 3:
                    print(cyan + "addDataBaseExo() : " + endC + "nombre d'images a combiner : " + str(len(raster_list)))

                # Name of the combined raster for this class
                image_combined = repertory_output + os.sep + image_name + '_' + str(macroclass_label) + SUFFIX_FUSION + extension_raster
                image_combined_list.append(image_combined)

                # Merge the rasters into a single image
                mergeListRaster(raster_list, image_combined, CODAGE)

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "FIN DE L AFFECTATION DES TAMPONS" + endC)

        # STEP 4 : ASSEMBLE THE CLASSIFIED IMAGE AND THE EXOGENOUS DATABASES

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "ASSEMBLAGE..." + endC)

        # Add the classification image to the list of combined database images
        image_combined_list.append(image_input)

        # Merge the database images with the classification
        mergeListRaster(image_combined_list, image_classif_add_output, CODAGE)
        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "FIN" + endC)

        # STEP 5 : DELETE USELESS INTERMEDIATE FILES

        # Delete intermediate data
        if not save_results_intermediate:
            image_combined_list.remove(image_input)
            for to_delete in image_combined_list:
                removeFile(to_delete)

            # Delete the temporary directories
            deleteDir(repertory_mask_temp)
            deleteDir(repertory_samples_filtering_temp)
            deleteDir(repertory_samples_cutting_temp)
            deleteDir(repertory_samples_buff_temp)

    # Update the log
    ending_event = "addDataBaseExo() : Add data base exogene to classification ending : "
    timeLine(path_time_log, ending_event)

    return
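
# Usage sketch for addDataBaseExo(), never called by the processing chain itself.
# All paths, labels, buffer values and SQL filters below are hypothetical
# illustrations of the expected dictionary layout, keyed by macroclass label.
def _exampleAddDataBaseExo():
    class_file_dico = {"11100": ["/data/bd/BATI_INDIFFERENCIE.shp"],
                       "11200": ["/data/bd/ROUTE.shp"]}
    class_buffer_dico = {"11100": ["0"],         # fixed buffer value (here: none)
                         "11200": ["LARGEUR"]}   # per-feature buffer read from this column
    class_sql_dico = {"11100": [""],
                      "11200": ["NATURE = 'Route empierree'"]}
    addDataBaseExo("/data/classif/zone1_raw.tif", "/data/classif/zone1_bdexo.tif",
                   class_file_dico, class_buffer_dico, class_sql_dico,
                   "/data/log/time.log")
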
def runTDCSeuil(input_im_seuils_dico, output_dir, input_sea_points, input_cut_vector, input_emprise_vector, simplif, is_calc_indice_image, attribute_val_limite, attribute_val_proced, attribute_val_datepr, attribute_val_precis, attribute_val_contac, attribute_val_type, no_data_value, path_time_log, channel_order=['Red', 'Green', 'Blue', 'NIR'], epsg=2154, format_raster='GTiff', format_vector="ESRI Shapefile", extension_raster=".tif", extension_vector=".shp", save_results_intermediate=True, overwrite=True):

    # Update the log
    starting_event = "runTDCSeuil() : Select TDC Seuil starting : "
    timeLine(path_time_log, starting_event)

    # Show the parameters
    if debug >= 3:
        print(bold + green + "Variables dans runTDCSeuil - Variables générales" + endC)
        print(cyan + "runTDCSeuil() : " + endC + "input_im_seuils_dico : " + str(input_im_seuils_dico) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "output_dir : " + str(output_dir) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "input_sea_points : " + str(input_sea_points) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "input_cut_vector : " + str(input_cut_vector) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "input_emprise_vector : " + str(input_emprise_vector) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "simplif : " + str(simplif) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "is_calc_indice_image : " + str(is_calc_indice_image) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "attribute_val_limite : " + str(attribute_val_limite) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "attribute_val_proced : " + str(attribute_val_proced) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "attribute_val_datepr : " + str(attribute_val_datepr) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "attribute_val_precis : " + str(attribute_val_precis) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "attribute_val_contac : " + str(attribute_val_contac) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "attribute_val_type : " + str(attribute_val_type) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "channel_order : " + str(channel_order) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "runTDCSeuil() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    AUTO = "auto"
    POS_NUMERO_DOSSIER = 2

    REP_NDVI_TDC_SEUIL = "ndvi_TDCSeuil"
    REP_TEMP_BIN_MASK_V = "Temp_Binary_Mask_Vector_"

    ATTR_NAME_REFDOSSIER = "RefDossier"
    ATTR_NAME_NOMIMAGE = "NomImage"
    ATTR_NAME_DATEACQUI = "DateAcqui"
    ATTR_NAME_HEUREACQUI = "HeureAcqui"
    ATTR_NAME_LIMITE = "TdcLimite"
    ATTR_NAME_PROCED = "TdcProced"
    ATTR_NAME_DATEPR = "TdcDatepro"
    ATTR_NAME_PRECIS = "TdcPrecis"
    ATTR_NAME_CONTAC = "TdcContact"
    ATTR_NAME_TYPE = "Type"

    # NDVI directory, to be kept!!!
    repertory_ndvi = output_dir + os.sep + REP_NDVI_TDC_SEUIL
    repertory_temp_list = []

    # Create the output directory if it does not exist yet
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Create the temporary output directory if it does not exist yet
    if not os.path.exists(repertory_ndvi):
        os.makedirs(repertory_ndvi)

    # Use the footprint file to fill in the attribute information
    res_values_dico = {}
    if input_emprise_vector != "":
        # Read the attributes of the vector file
        names_attribut_list = getAttributeNameList(input_emprise_vector, format_vector)
        attribute_name_dico = {}
        for name_attribut in names_attribut_list:
            attribute_name_dico[name_attribut] = getAttributeType(input_emprise_vector, name_attribut, format_vector)
        res_values_dico = getAttributeValues(input_emprise_vector, None, None, attribute_name_dico, format_vector)

    # Several thresholds are computed per image, and they differ from one image to the next
    bin_mask_list = []
    images_list = []
    nb_images = len(input_im_seuils_dico.split())
    image_first_and_seuils = input_im_seuils_dico.split()[0]

    # Build the list of images
    for elt in input_im_seuils_dico.split():
        images_list.append(elt.split(":")[0])

    if ":" not in image_first_and_seuils:
        print(cyan + "runTDCSeuil() : " + red + bold + "Aucun seuil spécifié ! (Nécessité d'au moins un pour la 1ère image)" + endC, file=sys.stderr)
        sys.exit(1)
    else:
        seuils_first_image_list = image_first_and_seuils.split(":")[1].split(",")

    for i in range(nb_images):
        # Each image + thresholds (example : /path/image_xx.tif:0.1,0,-0.1)
        image_index_and_seuils = input_im_seuils_dico.split()[i]
        seuils_index_image_list = image_index_and_seuils.split(":")[1].split(",")

        # The image to process
        input_image = image_index_and_seuils.split(":")[0]
        image_name = os.path.splitext(os.path.basename(input_image))[0]

        # Create the temporary computation directory
        repertory_temp = output_dir + os.sep + REP_TEMP_BIN_MASK_V + image_name
        if not os.path.exists(repertory_temp):
            os.makedirs(repertory_temp)
        repertory_temp_list.append(repertory_temp)

        # Initialize the attribute fields
        num_dossier = image_name.split("_")[POS_NUMERO_DOSSIER]
        attribute_val_refdossier = num_dossier
        attribute_val_nomimage = image_name
        attribute_val_datecqui = " "
        attribute_val_heureacqui = " "

        if attribute_val_limite == "":
            attribute_val_limite = " "
        if attribute_val_proced == "":
            attribute_val_proced = " "
        if attribute_val_datepr == "":
            now = datetime.datetime.now()
            attribute_val_datepr = now.strftime("%Y-%m-%d")
        if attribute_val_precis == "":
            attribute_val_precis = " "
        if attribute_val_contac == "":
            attribute_val_contac = " "
        if attribute_val_type == "":
            attribute_val_type = " "

        # Case where a footprint file with attribute data is available and contains a "RefDossier" field
        if ATTR_NAME_REFDOSSIER in res_values_dico:
            if num_dossier in res_values_dico[ATTR_NAME_REFDOSSIER]:
                index_dossier = res_values_dico[ATTR_NAME_REFDOSSIER].index(num_dossier)
                if ATTR_NAME_NOMIMAGE in res_values_dico:
                    attribute_val_nomimage = res_values_dico[ATTR_NAME_NOMIMAGE][index_dossier]
                if ATTR_NAME_DATEACQUI in res_values_dico:
                    datecqui_list = res_values_dico[ATTR_NAME_DATEACQUI][index_dossier]
                    attribute_val_datecqui = str(datecqui_list[0]) + "-" + str(datecqui_list[1]) + "-" + str(datecqui_list[2])
                if ATTR_NAME_HEUREACQUI in res_values_dico:
                    attribute_val_heureacqui = res_values_dico[ATTR_NAME_HEUREACQUI][index_dossier]

        # Initialize the StructAttribute list used to create the fields
        attributes_list = [StructAttribute(ATTR_NAME_REFDOSSIER, ogr.OFTString, 20, attribute_val_refdossier),
                           StructAttribute(ATTR_NAME_NOMIMAGE, ogr.OFTString, 20, attribute_val_nomimage),
                           StructAttribute(ATTR_NAME_DATEACQUI, ogr.OFTDate, None, attribute_val_datecqui),
                           StructAttribute(ATTR_NAME_HEUREACQUI, ogr.OFTString, 14, attribute_val_heureacqui),
                           StructAttribute(ATTR_NAME_LIMITE, ogr.OFTString, 20, attribute_val_limite),
                           StructAttribute(ATTR_NAME_PROCED, ogr.OFTString, 30, attribute_val_proced),
                           StructAttribute(ATTR_NAME_DATEPR, ogr.OFTString, 14, attribute_val_datepr),
                           StructAttribute(ATTR_NAME_PRECIS, ogr.OFTString, 20, attribute_val_precis),
                           StructAttribute(ATTR_NAME_CONTAC, ogr.OFTString, 20, attribute_val_contac),
                           StructAttribute(ATTR_NAME_TYPE, ogr.OFTString, 14, attribute_val_type)]

        # Compute the NDVI image if is_calc_indice_image is True
        if is_calc_indice_image:
            image_index = repertory_ndvi + os.sep + "image_NDVI_" + os.path.splitext(os.path.basename(images_list[i]))[0] + extension_raster
            if not os.path.exists(input_image):
                print(cyan + "runTDCSeuil() : " + red + bold + "L'image renseignée en entrée : " + input_image + " n'existe pas. Vérifiez le chemin !" + endC, file=sys.stderr)
                sys.exit(1)
            createNDVI(input_image, image_index, channel_order)
        else:
            image_index = seuils_index_image_list[0]
            if os.path.splitext(image_index)[1] != extension_raster:
                print(cyan + "runTDCSeuil() : " + red + bold + "Si vous choisissez de calculer l'image NDVI, mettre l'option -c. Sinon, le 1er paramètre derrière \":\" dans -isd doit être l'image indice (.tif)" + endC, file=sys.stderr)
                sys.exit(1)

        if ":" not in image_index_and_seuils:
            if is_calc_indice_image:
                for t in seuils_first_image_list:
                    if t == AUTO:
                        seuils_list = runCalculSeuil(image_index, output_dir, save_results_intermediate)
                        # Class-centre mask
                        bin_mask_cc = binaryMaskVect(image_index, repertory_temp, float(seuils_list[0]), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        # Lower-bound mask
                        bin_mask_bi = binaryMaskVect(image_index, repertory_temp, float(seuils_list[1]), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        # Add the masks to the list
                        bin_mask_list.append(bin_mask_cc)
                        bin_mask_list.append(bin_mask_bi)
                    else:
                        bin_mask = binaryMaskVect(image_index, repertory_temp, float(t), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        bin_mask_list.append(bin_mask)
            else:
                print(cyan + "runTDCSeuil() : " + red + bold + "Renseignez les images NDVI associées et les seuils !" + endC, file=sys.stderr)
                sys.exit(1)
        else:
            if is_calc_indice_image:
                for t in seuils_index_image_list:
                    if t == AUTO:
                        seuils_list = runCalculSeuil(image_index, output_dir, save_results_intermediate)
                        # Class-centre mask
                        bin_mask_cc = binaryMaskVect(image_index, repertory_temp, float(seuils_list[0]), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        # Lower-bound mask
                        bin_mask_bi = binaryMaskVect(image_index, repertory_temp, float(seuils_list[1]), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        # Add the masks to the list
                        bin_mask_list.append(bin_mask_cc)
                        bin_mask_list.append(bin_mask_bi)
                    else:
                        bin_mask = binaryMaskVect(image_index, repertory_temp, float(t), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        bin_mask_list.append(bin_mask)
            else:
                for j in range(1, len(seuils_index_image_list)):
                    t = seuils_index_image_list[j]
                    if t == AUTO:
                        seuils_list = runCalculSeuil(image_index, output_dir, save_results_intermediate)
                        # Class-centre mask
                        bin_mask_cc = binaryMaskVect(image_index, repertory_temp, float(seuils_list[0]), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        # Lower-bound mask
                        bin_mask_bi = binaryMaskVect(image_index, repertory_temp, float(seuils_list[1]), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        # Add the masks to the list
                        bin_mask_list.append(bin_mask_cc)
                        bin_mask_list.append(bin_mask_bi)
                    else:
                        bin_mask = binaryMaskVect(image_index, repertory_temp, float(t), input_cut_vector, attributes_list, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)
                        bin_mask_list.append(bin_mask)

    # Build the dictionary mapping each image to its NDVI vectors, as input for PolygonMerToTDC
    im_ndvivect_dico = ""
    if is_calc_indice_image:
        ndvi_mask_index = 0
        for i in range(nb_images):
            # Each image + thresholds (example : /path/image_xx.tif:0.1,0,-0.1)
            image_index_and_seuils = input_im_seuils_dico.split()[i]
            input_image = image_index_and_seuils.split(":")[0]
            seuils_index_image_list = image_index_and_seuils.split(":")[1].split(",")
            is_presence_auto = False
            im_ndvivect_dico += input_image + ":"

            # If thresholds are given only for the first image
            if ":" not in image_index_and_seuils:
                # Walk the thresholds of the first image
                for seuil in seuils_first_image_list:
                    if seuil == AUTO:
                        is_presence_auto = True

                # If one threshold is "auto", do one extra iteration (auto = lower bound + class centre)
                if is_presence_auto == True:
                    nb_iter = len(seuils_first_image_list)
                else:
                    nb_iter = len(seuils_first_image_list) - 1

                for s in range(nb_iter):
                    im_ndvivect_dico += bin_mask_list[ndvi_mask_index] + ","
                    ndvi_mask_index = ndvi_mask_index + 1
                im_ndvivect_dico += bin_mask_list[ndvi_mask_index] + " "

            # If at least one threshold is given for each of the other images
            else:
                # Walk the thresholds of the image
                for seuil in seuils_index_image_list:
                    if seuil == AUTO:
                        is_presence_auto = True

                # If one threshold is "auto", do one extra iteration (auto = lower bound + class centre)
                if is_presence_auto:
                    nb_iter = len(seuils_index_image_list)
                else:
                    nb_iter = len(seuils_index_image_list) - 1

                for s in range(nb_iter):
                    im_ndvivect_dico += bin_mask_list[ndvi_mask_index] + ","
                    ndvi_mask_index = ndvi_mask_index + 1
                im_ndvivect_dico += bin_mask_list[ndvi_mask_index] + " "
                ndvi_mask_index = ndvi_mask_index + 1
    else:
        ndvi_mask_index = 0
        for i in range(nb_images):
            # Each image + thresholds (example : /path/image_xx.tif:0.1,0,-0.1)
            image_index_and_seuils = input_im_seuils_dico.split()[i]
            input_image = image_index_and_seuils.split(":")[0]
            seuils_index_image_list = image_index_and_seuils.split(":")[1].split(",")
            is_presence_auto = False
            im_ndvivect_dico += input_image + ":"

            if ":" not in image_index_and_seuils:
                print(cyan + "runTDCSeuil() : " + red + bold + "Renseignez les images NDVI associées et les seuils !" + endC, file=sys.stderr)
                sys.exit(1)
            # If at least one threshold is given for each of the other images
            else:
                # Walk the thresholds of the image
                for seuil in seuils_index_image_list:
                    if seuil == AUTO:
                        is_presence_auto = True

                # If one threshold is "auto", do one extra iteration (auto = lower bound + class centre)
                if is_presence_auto:
                    nb_iter = len(seuils_index_image_list)
                else:
                    nb_iter = len(seuils_index_image_list) - 1

                for s in range(1, nb_iter):
                    im_ndvivect_dico += bin_mask_list[ndvi_mask_index] + ","
                    ndvi_mask_index = ndvi_mask_index + 1
                im_ndvivect_dico += bin_mask_list[ndvi_mask_index] + " "
                ndvi_mask_index = ndvi_mask_index + 1

    im_ndvivect_dico = im_ndvivect_dico[:-1]
    tdc_shp = polygonMerToTDC(im_ndvivect_dico, output_dir, input_sea_points, True, simplif, input_cut_vector, 3.5, -3.5, no_data_value, path_time_log, epsg, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)

    # Delete the temporary directories
    for repertory_temp in repertory_temp_list:
        if not save_results_intermediate and os.path.exists(repertory_temp):
            shutil.rmtree(repertory_temp)

    # Update the log
    ending_event = "runTDCSeuil() : Select TDC Seuil ending : "
    timeLine(path_time_log, ending_event)

    return tdc_shp
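
# Usage sketch for runTDCSeuil(), never called by the processing chain itself.
# input_im_seuils_dico is a space-separated list of "image:thresholds" entries,
# the thresholds being comma-separated values or "auto". The folder number is
# read from the 3rd "_"-separated token of the image name. All paths are
# hypothetical.
def _exampleRunTDCSeuil():
    im_seuils = "/data/im/img_zone_01_a.tif:0.1,0,-0.1 /data/im/img_zone_02_a.tif:auto"
    tdc_shp = runTDCSeuil(im_seuils, "/data/tdc_out",
                          "/data/aux/sea_points.shp", "/data/aux/cut_zone.shp",
                          "/data/aux/emprise.shp", 1.0, True,
                          "", "", "", "", "", "",
                          0, "/data/log/time.log")
    return tdc_shp
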
def soilOccupationChange(input_plot_vector, output_plot_vector, footprint_vector, input_tx_files_list, evolutions_list=['0:1:11000:10:50:and', '0:1:12000:10:50:and', '0:1:21000:10:50:and', '0:1:22000:10:50:and', '0:1:23000:10:50:and'], class_label_dico={11000: 'Bati', 12000: 'Route', 21000: 'SolNu', 22000: 'Eau', 23000: 'Vegetation'}, epsg=2154, no_data_value=0, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster='.tif', extension_vector='.shp', postgis_ip_host='localhost', postgis_num_port=5432, postgis_user_name='postgres', postgis_password='******', postgis_database_name='database', postgis_schema_name='public', postgis_encoding='latin1', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "Evolution de l'OCS par parcelle - Variables dans la fonction :" + endC)
        print(cyan + " soilOccupationChange() : " + endC + "input_plot_vector : " + str(input_plot_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "output_plot_vector : " + str(output_plot_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "footprint_vector : " + str(footprint_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "input_tx_files_list : " + str(input_tx_files_list) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "evolutions_list : " + str(evolutions_list) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "class_label_dico : " + str(class_label_dico) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_ip_host : " + str(postgis_ip_host) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_num_port : " + str(postgis_num_port) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_user_name : " + str(postgis_user_name) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_password : " + str(postgis_password) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_database_name : " + str(postgis_database_name) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_schema_name : " + str(postgis_schema_name) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "postgis_encoding : " + str(postgis_encoding) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + " soilOccupationChange() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Constants
    EXTENSION_TEXT = '.txt'
    SUFFIX_TEMP = '_temp'
    SUFFIX_CUT = '_cut'
    AREA_FIELD = 'st_area'
    GEOM_FIELD = 'geom'

    # Update the log
    starting_event = "soilOccupationChange() : Début du traitement : "
    timeLine(path_time_log, starting_event)
    print(cyan + "soilOccupationChange() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # 'basename' variables
    output_plot_basename = os.path.splitext(os.path.basename(output_plot_vector))[0]

    # Temp variables
    temp_directory = os.path.dirname(output_plot_vector) + os.sep + output_plot_basename + SUFFIX_TEMP
    plot_vector_cut = temp_directory + os.sep + output_plot_basename + SUFFIX_CUT + extension_vector

    # PostGIS variables
    plot_table = output_plot_basename.lower()

    # .txt file associated with the output vector file, listing the quantified evolutions
    output_evolution_text_file = os.path.splitext(output_plot_vector)[0] + EXTENSION_TEXT

    # Clean up previous runs
    if debug >= 3:
        print(cyan + "soilOccupationChange() : " + endC + "Nettoyage des traitements précédents." + endC + '\n')
    removeVectorFile(output_plot_vector, format_vector=format_vector)
    removeFile(output_evolution_text_file)
    cleanTempData(temp_directory)
    dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    #############
    # Step 0/2  # Preparing the processing
    #############

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 0/2 - Début de la préparation des traitements." + endC + '\n')

    # Cut the plots to the study area
    cutVector(footprint_vector, input_plot_vector, plot_vector_cut, overwrite=overwrite, format_vector=format_vector)

    # Get the field names of the source file (to later isolate, and rename, the newly created fields)
    attr_names_list_origin = getAttributeNameList(plot_vector_cut, format_vector=format_vector)
    new_attr_names_list_origin = attr_names_list_origin

    # Prepare PostGIS
    createDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 0/2 - Fin de la préparation des traitements." + endC + '\n')

    #############
    # Step 1/2  # Computing the statistics at tx
    #############

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 1/2 - Début des calculs des statistiques à tx." + endC + '\n')

    len_tx = len(input_tx_files_list)
    tx = 0

    # Loop over the t0+x input files
    for input_tx_file in input_tx_files_list:
        if debug >= 3:
            print(cyan + "soilOccupationChange() : " + endC + bold + "Calcul des statistiques à tx %s/%s." % (tx + 1, len_tx) + endC + '\n')

        # OCS statistics per plot
        statisticsVectorRaster(input_tx_file, plot_vector_cut, "", 1, True, False, False, [], [], class_label_dico, path_time_log, clean_small_polygons=True, format_vector=format_vector, save_results_intermediate=save_results_intermediate, overwrite=overwrite)

        # Get the field names of the plot file (including the fields just created by CVR)
        attr_names_list_tx = getAttributeNameList(plot_vector_cut, format_vector=format_vector)

        # Isolate the new fields created by CVR
        fields_name_list = []
        for attr_name in attr_names_list_tx:
            if attr_name not in new_attr_names_list_origin:
                fields_name_list.append(attr_name)

        # Build the new names of the CVR fields
        new_fields_name_list = []
        for field_name in fields_name_list:
            new_field_name = 't%s_' % tx + field_name
            new_field_name = new_field_name[:10]
            new_fields_name_list.append(new_field_name)
            new_attr_names_list_origin.append(new_field_name)

        # Rename the CVR fields, so CVR can be run again later on other dates
        renameFieldsVector(plot_vector_cut, fields_name_list, new_fields_name_list, format_vector=format_vector)

        tx += 1

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 1/2 - Fin des calculs des statistiques à tx." + endC + '\n')

    #############
    # Step 2/2  # Characterising the changes
    #############

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 2/2 - Début de la caractérisation des changements." + endC + '\n')

    # Pre-processing in PostGIS
    plot_table = importVectorByOgr2ogr(postgis_database_name, plot_vector_cut, plot_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, epsg=epsg, codage=postgis_encoding)

    connection = openConnection(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    # SQL query computing the plot areas
    sql_query = "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, AREA_FIELD)
    sql_query += "UPDATE %s SET %s = ST_Area(%s);\n" % (plot_table, AREA_FIELD, GEOM_FIELD)

    # Loop over the evolutions to quantify
    temp_field = 1
    for evolution in evolutions_list:
        evolution_split = evolution.split(':')
        idx_bef = int(evolution_split[0])
        idx_aft = int(evolution_split[1])
        label = int(evolution_split[2])
        evol = abs(int(evolution_split[3]))
        evol_s = abs(int(evolution_split[4]))
        combi = evolution_split[5]
        class_name = class_label_dico[label]
        def_evo_field = "def_evo_%s" % str(temp_field)

        if debug >= 3:
            print(cyan + "soilOccupationChange() : " + endC + bold + "Caractérisation des changements t%s/t%s pour la classe '%s' (%s)." % (idx_bef, idx_aft, class_name, label) + endC + '\n')

        if evol != 0 or evol_s != 0:
            # Evolution expressed as a rate
            evol_str = str(evol) + ' %'
            evo_field = "evo_%s" % str(temp_field)
            t0_field = 't%s_' % idx_bef + class_name.lower()[:7]
            t1_field = 't%s_' % idx_aft + class_name.lower()[:7]

            # Evolution expressed as a surface
            evol_s_str = str(evol_s) + ' m²'
            evo_s_field = "evo_s_%s" % str(temp_field)
            t0_s_field = 't%s_s_' % idx_bef + class_name.lower()[:5]
            t1_s_field = 't%s_s_' % idx_aft + class_name.lower()[:5]

            # SQL query computing the raw evolution
            sql_query += "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, evo_field)
            sql_query += "UPDATE %s SET %s = %s - %s;\n" % (plot_table, evo_field, t1_field, t0_field)
            sql_query += "ALTER TABLE %s ADD COLUMN %s REAL;\n" % (plot_table, evo_s_field)
            sql_query += "UPDATE %s SET %s = %s - %s;\n" % (plot_table, evo_s_field, t1_s_field, t0_s_field)
            sql_query += "ALTER TABLE %s ADD COLUMN %s VARCHAR;\n" % (plot_table, def_evo_field)
            sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - aucune evolution';\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name)

            # Evolution constrained both by rate and by surface
            if evol != 0 and evol_s != 0:
                text_evol = "taux à %s" % evol_str
                if combi == 'and':
                    text_evol += " ET "
                elif combi == 'or':
                    text_evol += " OU "
                text_evol += "surface à %s" % evol_s_str
                sql_where_pos = "%s >= %s %s %s >= %s" % (evo_field, evol, combi, evo_s_field, evol_s)
                sql_where_neg = "%s <= -%s %s %s <= -%s" % (evo_field, evol, combi, evo_s_field, evol_s)
            # Evolution constrained by rate only
            elif evol != 0:
                text_evol = "taux à %s" % evol_str
                sql_where_pos = "%s >= %s" % (evo_field, evol)
                sql_where_neg = "%s <= -%s" % (evo_field, evol)
            # Evolution constrained by surface only
            elif evol_s != 0:
                text_evol = "surface à %s" % evol_s_str
                sql_where_pos = "%s >= %s" % (evo_s_field, evol_s)
                sql_where_neg = "%s <= -%s" % (evo_s_field, evol_s)

            sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - evolution positive' WHERE %s;\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name, sql_where_pos)
            sql_query += "UPDATE %s SET %s = 't%s a t%s - %s - evolution negative' WHERE %s;\n" % (plot_table, def_evo_field, idx_bef, idx_aft, class_name, sql_where_neg)

            # Append the parameters of the quantified evolution (dates, class, rate/surface) to the output text file
            text = "%s --> évolution entre t%s et t%s, pour la classe '%s' (label %s) :\n" % (def_evo_field, idx_bef, idx_aft, class_name, label)
            text += " %s --> taux d'évolution brut" % evo_field + " (%)\n"
            text += " %s --> surface d'évolution brute" % evo_s_field + " (m²)\n"
            text += "Evolution quantifiée : %s\n" % text_evol
            appendTextFileCR(output_evolution_text_file, text)

        temp_field += 1

    # SQL processing of the OCS class evolution
    executeQuery(connection, sql_query)
    closeConnection(connection)
    exportVectorByOgr2ogr(postgis_database_name, output_plot_vector, plot_table, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name, format_type=format_vector)

    print(cyan + "soilOccupationChange() : " + bold + green + "ETAPE 2/2 - Fin de la caractérisation des changements." + endC + '\n')

    # Delete the temporary files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "soilOccupationChange() : " + endC + "Suppression des fichiers temporaires." + endC + '\n')
        deleteDir(temp_directory)
        dropDatabase(postgis_database_name, user_name=postgis_user_name, password=postgis_password, ip_host=postgis_ip_host, num_port=postgis_num_port, schema_name=postgis_schema_name)

    print(cyan + "soilOccupationChange() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n')

    # Update the log
    ending_event = "soilOccupationChange() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return
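
# Usage sketch for soilOccupationChange(), never called by the processing chain
# itself; it needs a reachable PostGIS server. Each evolutions_list entry reads
# "t_before:t_after:class_label:rate_%:surface_m2:and|or". Paths, credentials
# and thresholds below are hypothetical.
def _exampleSoilOccupationChange():
    soilOccupationChange("/data/parcelles/parcelles.shp",
                         "/data/parcelles/parcelles_evolution.shp",
                         "/data/aux/footprint.shp",
                         ["/data/ocs/ocs_t0.tif", "/data/ocs/ocs_t1.tif"],
                         evolutions_list=['0:1:11000:10:50:and'],
                         postgis_password='postgres')
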
def computeMajorityClass(input_grid, temp_directory, nodata_field, built_field, mineral_field, baresoil_field, water_field, vegetation_field, high_vegetation_field, low_vegetation_field, maj_ocs_field, veg_mean_field, class_label_dico_out, format_vector, extension_vector, overwrite):

    SUFFIX_CLASS = '_class'
    FIELD_TYPE = ogr.OFTInteger
    FIELD_NAME_MAJORITY = 'majority'

    temp_class_list = []

    base_name = os.path.splitext(os.path.basename(input_grid))[0]
    temp_grid = temp_directory + os.sep + base_name + SUFFIX_CLASS + extension_vector
    temp_class0 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "0" + extension_vector
    temp_class1 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "1" + extension_vector
    temp_class2 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "2" + extension_vector
    temp_class3 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "3" + extension_vector
    temp_class4 = temp_directory + os.sep + base_name + SUFFIX_CLASS + "4" + extension_vector

    ### Get the majority class

    if debug >= 3:
        print(cyan + "computeMajorityClass() : " + endC + bold + "Récupération de la classe majoritaire." + endC + '\n')

    addNewFieldVector(input_grid, maj_ocs_field, FIELD_TYPE, field_value=None, field_width=None, field_precision=None, format_vector=format_vector)
    attr_names_list = getAttributeNameList(input_grid, format_vector=format_vector)
    attr_names_list_str = "'"
    for attr_name in attr_names_list:
        attr_names_list_str += attr_name + ', '
    attr_names_list_str = attr_names_list_str[:-2] + "'"

    # Cells whose majority class is no-data, built-up, mineral or water
    expression = "%s = '%s' OR %s = '%s' OR %s = '%s' OR %s = '%s'" % (FIELD_NAME_MAJORITY, nodata_field, FIELD_NAME_MAJORITY, built_field, FIELD_NAME_MAJORITY, mineral_field, FIELD_NAME_MAJORITY, water_field)
    ret = filterSelectDataVector(input_grid, temp_class0, attr_names_list_str, expression, overwrite=overwrite, format_vector=format_vector)
    updateFieldVector(temp_class0, field_name=maj_ocs_field, value=class_label_dico_out["MAJ_OTHERS_CLASS"], format_vector=format_vector)
    temp_class_list.append(temp_class0)

    # Cells whose majority class is bare soil
    expression = "%s = '%s'" % (FIELD_NAME_MAJORITY, baresoil_field)
    ret = filterSelectDataVector(input_grid, temp_class1, attr_names_list_str, expression, overwrite=overwrite, format_vector=format_vector)
    updateFieldVector(temp_class1, field_name=maj_ocs_field, value=class_label_dico_out["MAJ_BARESOIL_CLASS"], format_vector=format_vector)
    temp_class_list.append(temp_class1)

    # Vegetation cells with a mean height below 1 m
    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s < 1)" % (FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY, low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field, veg_mean_field)
    ret = filterSelectDataVector(input_grid, temp_class2, attr_names_list_str, expression, overwrite=overwrite, format_vector=format_vector)
    updateFieldVector(temp_class2, field_name=maj_ocs_field, value=class_label_dico_out["MAJ_LOW_VEG_CLASS"], format_vector=format_vector)
    temp_class_list.append(temp_class2)

    # Vegetation cells with a mean height between 1 and 5 m
    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 1 AND %s < 5)" % (FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY, low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field, veg_mean_field, veg_mean_field)
    ret = filterSelectDataVector(input_grid, temp_class3, attr_names_list_str, expression, overwrite=overwrite, format_vector=format_vector)
    updateFieldVector(temp_class3, field_name=maj_ocs_field, value=class_label_dico_out["MAJ_MED_VEG_CLASS"], format_vector=format_vector)
    temp_class_list.append(temp_class3)

    # Vegetation cells with a mean height of 5 m or more
    expression = "(%s = '%s' OR %s = '%s' OR %s = '%s') AND (%s >= 5)" % (FIELD_NAME_MAJORITY, vegetation_field, FIELD_NAME_MAJORITY, low_vegetation_field, FIELD_NAME_MAJORITY, high_vegetation_field, veg_mean_field)
    ret = filterSelectDataVector(input_grid, temp_class4, attr_names_list_str, expression, overwrite=overwrite, format_vector=format_vector)
    updateFieldVector(temp_class4, field_name=maj_ocs_field, value=class_label_dico_out["MAJ_HIGH_VEG_CLASS"], format_vector=format_vector)
    temp_class_list.append(temp_class4)

    fusionVectors(temp_class_list, temp_grid, format_vector=format_vector)
    removeVectorFile(input_grid, format_vector=format_vector)
    copyVectorFile(temp_grid, input_grid, format_vector=format_vector)

    return 0
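
# Usage sketch for computeMajorityClass(), never called by the processing chain
# itself. input_grid must already carry the 'majority' field tested above and a
# mean-vegetation-height field. The field names and output labels below are
# hypothetical; the dictionary keys are the ones required by the function.
def _exampleComputeMajorityClass():
    class_label_dico_out = {"MAJ_OTHERS_CLASS": 1, "MAJ_BARESOIL_CLASS": 2,
                            "MAJ_LOW_VEG_CLASS": 3, "MAJ_MED_VEG_CLASS": 4,
                            "MAJ_HIGH_VEG_CLASS": 5}
    computeMajorityClass("/data/grid/grid.shp", "/data/grid/temp",
                         "nodata", "bati", "mineral", "solnu", "eau",
                         "vegetation", "vege_haute", "vege_basse",
                         "maj_ocs", "veg_h_mean", class_label_dico_out,
                         "ESRI Shapefile", ".shp", True)
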
def vectorsListToOcs(input_text, output_raster, footprint_vector, reference_raster, codage_raster='uint8', epsg=2154, no_data_value=0, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster='.tif', extension_vector='.shp', path_time_log='', save_results_intermediate=False, overwrite=True):

    if debug >= 3:
        print('\n' + bold + green + "OCS raster à partir d'une liste de vecteurs - Variables dans la fonction :" + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "input_text : " + str(input_text) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "output_raster : " + str(output_raster) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "footprint_vector : " + str(footprint_vector) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "reference_raster : " + str(reference_raster) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "codage_raster : " + str(codage_raster) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + " vectorsListToOcs() : " + endC + "overwrite : " + str(overwrite) + endC + '\n')

    # Constants
    SUFFIX_TEMP = '_temp'
    SUFFIX_CUT = '_cut'
    SUFFIX_FILTER = '_filter'
    SUFFIX_BUFFER = '_buffer'
    TEXT_SEPARATOR = ':'

    # Update the log
    starting_event = "vectorsListToOcs() : Début du traitement : "
    timeLine(path_time_log, starting_event)
    print(cyan + "vectorsListToOcs() : " + bold + green + "DEBUT DES TRAITEMENTS" + endC + '\n')

    # 'basename' variables
    output_raster_basename = os.path.basename(os.path.splitext(output_raster)[0])
    output_raster_dirname = os.path.dirname(output_raster)

    # Temp variables
    temp_directory = output_raster_dirname + os.sep + output_raster_basename + SUFFIX_TEMP
    temp_raster = temp_directory + os.sep + output_raster_basename + SUFFIX_TEMP + extension_raster

    # Clean up previous runs
    if overwrite:
        if debug >= 3:
            print(cyan + "vectorsListToOcs() : " + endC + "Nettoyage des traitements précédents." + '\n')
        removeFile(output_raster)
        cleanTempData(temp_directory)
    else:
        if os.path.exists(output_raster):
            print(cyan + "vectorsListToOcs() : " + bold + yellow + "Le fichier de sortie existe déjà et ne sera pas regénéré." + endC)
            raise NameError(cyan + "vectorsListToOcs() : " + bold + yellow + "Le fichier de sortie existe déjà et ne sera pas regénéré." + endC)
        if not os.path.exists(temp_directory):
            os.makedirs(temp_directory)

    # Compare the extents of the footprint vector and of the reference raster
    # (the raster must be the same size as, or larger than, the vector)
    xmin_fpt, xmax_fpt, ymin_fpt, ymax_fpt = getEmpriseFile(footprint_vector, format_vector=format_vector)
    xmin_ref, xmax_ref, ymin_ref, ymax_ref = getEmpriseImage(reference_raster)
    if round(xmin_fpt, 4) < round(xmin_ref, 4) or round(xmax_fpt, 4) > round(xmax_ref, 4) or round(ymin_fpt, 4) < round(ymin_ref, 4) or round(ymax_fpt, 4) > round(ymax_ref, 4):
        print(cyan + "vectorsListToOcs() : " + bold + red + "xmin_fpt, xmax_fpt, ymin_fpt, ymax_fpt" + endC, xmin_fpt, xmax_fpt, ymin_fpt, ymax_fpt, file=sys.stderr)
        print(cyan + "vectorsListToOcs() : " + bold + red + "xmin_ref, xmax_ref, ymin_ref, ymax_ref" + endC, xmin_ref, xmax_ref, ymin_ref, ymax_ref, file=sys.stderr)
        raise NameError(cyan + "vectorsListToOcs() : " + bold + red + "The extent of the footprint vector (%s) is greater than the reference raster (%s)." % (footprint_vector, reference_raster) + endC)

    # Read the processing steps from the input text file
    text_list = readTextFileBySeparator(input_text, TEXT_SEPARATOR)

    ####################################################################

    print(cyan + "vectorsListToOcs() : " + bold + green + "Début de la génération de l'OCS raster à partir de vecteurs." + endC + '\n')

    # Loop over the processing steps
    for text in text_list:
        idx = text_list.index(text) + 1
        class_label = int(text[0])
        vector_file = text[1]
        if debug >= 3:
            print(cyan + "vectorsListToOcs() : " + endC + bold + "Génération %s/%s : " % (idx, len(text_list)) + endC + "traitement du fichier %s (label %s)." % (vector_file, str(class_label)) + '\n')

        # Names of the temporary files
        vector_file_basename = os.path.basename(os.path.splitext(vector_file)[0])
        vector_file_cut = temp_directory + os.sep + vector_file_basename + SUFFIX_CUT + extension_vector
        vector_file_filter = temp_directory + os.sep + vector_file_basename + SUFFIX_FILTER + extension_vector
        vector_file_buffer = temp_directory + os.sep + vector_file_basename + SUFFIX_BUFFER + extension_vector
        vector_file_raster = temp_directory + os.sep + vector_file_basename + extension_raster

        # Processing variables (buffer and SQL filter)
        try:
            buffer_len = float(text[2])
        except ValueError:
            buffer_len = text[2]
        except Exception:
            buffer_len = ''
        try:
            sql_filter = text[3]
        except Exception:
            sql_filter = ''

        # Cut to the footprint of the study area
        if debug >= 3:
            print(cyan + "vectorsListToOcs() : " + endC + "Découpage à l'emprise de la zone d'étude." + '\n')
        cutVectorAll(footprint_vector, vector_file, vector_file_cut, overwrite=overwrite, format_vector=format_vector)

        # SQL filtering (optional)
        if sql_filter != '':
            if debug >= 3:
                print(cyan + "vectorsListToOcs() : " + endC + "Application du filtrage SQL : %s." % sql_filter + '\n')
            attr_names_list = getAttributeNameList(vector_file_cut, format_vector=format_vector)
            column = "'"
            for attr_name in attr_names_list:
                column += attr_name + ", "
            column = column[:-2]
            column += "'"
            filterSelectDataVector(vector_file_cut, vector_file_filter, column, sql_filter, overwrite=overwrite, format_vector=format_vector)
        else:
            vector_file_filter = vector_file_cut

        # Apply a buffer (optional)
        if buffer_len != '' and buffer_len != 0:
            if debug >= 3:
                print(cyan + "vectorsListToOcs() : " + endC + "Application d'un buffer : %s." % buffer_len + '\n')
            if type(buffer_len) is float:
                bufferVector(vector_file_filter, vector_file_buffer, buffer_len, col_name_buf='', fact_buf=1.0, quadsecs=10, format_vector=format_vector)
            else:
                bufferVector(vector_file_filter, vector_file_buffer, 0, col_name_buf=buffer_len, fact_buf=1.0, quadsecs=10, format_vector=format_vector)
        else:
            vector_file_buffer = vector_file_filter

        # Rasterize the prepared vector
        if debug >= 3:
            print(cyan + "vectorsListToOcs() : " + endC + "Rastérisation du vecteur préparé." + '\n')
        rasterizeBinaryVector(vector_file_buffer, reference_raster, vector_file_raster, label=class_label, codage=codage_raster)

        # Add the information to the output raster
        if debug >= 3:
            print(cyan + "vectorsListToOcs() : " + endC + "Ajout de l'information dans le raster de sortie." + '\n')
        if idx == 1:
            shutil.copy(vector_file_raster, output_raster)
        else:
            removeFile(temp_raster)
            shutil.copy(output_raster, temp_raster)
            removeFile(output_raster)
            expression = "im1b1!=%s ? im1b1 : im2b1" % no_data_value
            rasterCalculator([temp_raster, vector_file_raster], output_raster, expression, codage=codage_raster)

    print(cyan + "vectorsListToOcs() : " + bold + green + "Fin de la génération de l'OCS raster à partir de vecteurs." + endC + '\n')

    ####################################################################

    # Delete the temporary files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "vectorsListToOcs() : " + endC + "Suppression des fichiers temporaires." + '\n')
        deleteDir(temp_directory)

    print(cyan + "vectorsListToOcs() : " + bold + green + "FIN DES TRAITEMENTS" + endC + '\n')

    # Update the log
    ending_event = "vectorsListToOcs() : Fin du traitement : "
    timeLine(path_time_log, ending_event)

    return 0
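
# Usage sketch for vectorsListToOcs(), never called by the processing chain
# itself. The input text file holds one "label:vector[:buffer[:sql]]" entry per
# line, e.g. (hypothetical labels, paths and filter):
#
#   11000:/data/bd/BATI.shp
#   12000:/data/bd/ROUTE.shp:5.0
#   22000:/data/bd/SURFACE_EAU.shp:0:NATURE='Lac'
#
# The reference raster drives the output grid and must cover the footprint vector.
def _exampleVectorsListToOcs():
    vectorsListToOcs("/data/ocs/inputs.txt", "/data/ocs/ocs.tif",
                     "/data/aux/footprint.shp", "/data/im/reference.tif")
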
def heightOfRoughnessElements(grid_input, grid_output, built_input, height_field, id_field, epsg, project_encoding, server_postgis, port_number, user_postgis, password_postgis, database_postgis, schema_postgis, path_time_log, format_vector='ESRI Shapefile', save_results_intermediate=False, overwrite=True): print(bold + yellow + "Début du calcul de l'indicateur Height of Roughness Elements." + endC + "\n") timeLine(path_time_log, "Début du calcul de l'indicateur Height of Roughness Elements : ") if debug >= 3: print(bold + green + "heightOfRoughnessElements() : Variables dans la fonction" + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "grid_input : " + str(grid_input) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "grid_output : " + str(grid_output) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "built_input : " + str(built_input) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "height_field : " + str(height_field) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "id_field : " + str(id_field) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "epsg : " + str(epsg) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "project_encoding : " + str(project_encoding) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "server_postgis : " + str(server_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "port_number : " + str(port_number) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "user_postgis : " + str(user_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "password_postgis : " + str(password_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "database_postgis : " + str(database_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "schema_postgis : " + str(schema_postgis) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "format_vector : " + str(format_vector) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "heightOfRoughnessElements() : " + endC + "overwrite : " + str(overwrite) + endC) print("\n") if not os.path.exists(grid_output) or overwrite: ############################################ ### Préparation générale des traitements ### ############################################ print(bold + cyan + "Préparation au calcul de Height of Roughness Elements :" + endC) timeLine(path_time_log, " Préparation au calcul de Height of Roughness Elements : ") if os.path.exists(grid_output): removeVectorFile(grid_output) # Création de la base de données PostGIS # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflits avec autres indicateurs (Aspect Ratio / Terrain Roughness Class) createDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Import des fichiers shapes maille et bati dans la base de données PostGIS table_name_maille = importVectorByOgr2ogr(database_postgis, grid_input, 'hre_maille', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding) table_name_bati 
= importVectorByOgr2ogr(database_postgis, built_input, 'hre_bati', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, epsg=str(epsg), codage=project_encoding)

        # ID handling: fall back to 'ogc_fid' if the requested ID field is missing
        connection = openConnection(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis)
        attr_names_list = getAttributeNameList(built_input, format_vector=format_vector)
        if id_field not in attr_names_list:
            id_field = 'ogc_fid'
        # ~ id_query = "ALTER TABLE %s ADD COLUMN %s SERIAL PRIMARY KEY" % (table_name_bati, id_field)
        # ~ executeQuery(connection, id_query)

        ###############################################
        ### Indicator computation via SQL queries   ###
        ###############################################

        print(bold + cyan + "Calcul de Height of Roughness Elements :" + endC)
        timeLine(path_time_log, " Calcul de Height of Roughness Elements : ")

        # Spatial indexes to speed up the spatial queries (currently disabled: the statements are commented out in the SQL itself)
        query = """
        --CREATE INDEX IF NOT EXISTS maille_geom_gist ON %s USING GIST (geom);
        --CREATE INDEX IF NOT EXISTS bati_geom_gist ON %s USING GIST (geom);
        """ % (table_name_maille, table_name_bati)

        # Intersection of the grid (maille) and building (bati) tables: for each grid cell, fetch the buildings intersecting it
        query += """
        DROP TABLE IF EXISTS hre_decoup;
        CREATE TABLE hre_decoup AS
            SELECT b.%s as ID, b.%s as hauteur, ST_Intersection(b.geom, m.geom) as geom
            FROM %s as b, %s as m
            WHERE ST_Intersects(b.geom, m.geom)
                AND (ST_GeometryType(b.geom) = 'ST_Polygon' OR ST_GeometryType(b.geom) = 'ST_MultiPolygon')
                AND (ST_GeometryType(m.geom) = 'ST_Polygon' OR ST_GeometryType(m.geom) = 'ST_MultiPolygon');
        CREATE INDEX IF NOT EXISTS decoup_geom_gist ON hre_decoup USING GIST (geom);
        """ % (id_field, height_field, table_name_bati, table_name_maille)

        # Intermediate table of secondary indicators (area and volume of each clipped building piece)
        query += """
        DROP TABLE IF EXISTS hre_temp;
        CREATE TABLE hre_temp AS
            SELECT d.ID, st_area(d.geom) as surface, (st_area(d.geom) * d.hauteur) as volume, d.geom as geom
            FROM hre_decoup as d;
        CREATE INDEX IF NOT EXISTS temp_geom_gist ON hre_temp USING GIST (geom);
        """

        # Intermediate table computing mean_h, only for the grid cells that intersect buildings
        query += """
        DROP TABLE IF EXISTS hre_maille_bis;
        CREATE TABLE hre_maille_bis AS
            SELECT m.ID as ID, ((sum(t.volume) / count(t.geom)) / (sum(t.surface) / count(t.geom))) as mean_h, m.geom as geom
            FROM %s as m, hre_temp as t
            WHERE ST_Intersects(m.geom, t.geom)
                AND (ST_GeometryType(m.geom) = 'ST_Polygon' OR ST_GeometryType(m.geom) = 'ST_MultiPolygon')
                AND (ST_GeometryType(t.geom) = 'ST_Polygon' OR ST_GeometryType(t.geom) = 'ST_MultiPolygon')
            GROUP BY m.ID, m.geom;
        CREATE INDEX IF NOT EXISTS maille_bis_geom_gist ON hre_maille_bis USING GIST (geom);
        """ % (table_name_maille)

        # Intermediate table for the grid cells that intersect no building (mean_h defaults to 0)
        query += """
        DROP TABLE IF EXISTS hre_maille_ter;
        CREATE TABLE hre_maille_ter AS
            SELECT DISTINCT ID as ID, geom as geom
            FROM %s
            WHERE ID NOT IN (SELECT DISTINCT ID FROM hre_maille_bis);
        ALTER TABLE hre_maille_ter ADD mean_h DOUBLE PRECISION;
        UPDATE hre_maille_ter SET mean_h = 0;
        CREATE INDEX IF NOT EXISTS maille_ter_geom_gist ON hre_maille_ter USING GIST (geom);
        """ % (table_name_maille)

        # Union of the two previous tables to recover the full set of input grid polygons
        query += """
        DROP TABLE IF EXISTS hre_height;
        CREATE TABLE hre_height AS
            SELECT ID, mean_h, geom FROM hre_maille_bis
            UNION
            SELECT ID, mean_h, geom FROM hre_maille_ter;
        ALTER TABLE hre_height ALTER COLUMN ID TYPE INTEGER;
        """

        # Run the SQL query
        if debug >= 1:
            print(query)
        executeQuery(connection, query)
        closeConnection(connection)

        # Export the table holding the computed indicator to shapefile
        exportVectorByOgr2ogr(database_postgis, grid_output, 'hre_height', user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=str(port_number), schema_name=schema_postgis, format_type=format_vector)

        ##########################################
        ### Cleaning up temporary files        ###
        ##########################################

        if not save_results_intermediate:
            # ~ dropDatabase(database_postgis, user_name=user_postgis, password=password_postgis, ip_host=server_postgis, num_port=port_number, schema_name=schema_postgis) # Conflicts with other indicators (Aspect Ratio / Terrain Roughness Class)
            pass

    else:
        print(bold + magenta + "Le calcul de Height of Roughness Elements a déjà eu lieu." + endC)

    print(bold + yellow + "Fin du calcul de l'indicateur Height of Roughness Elements." + endC + "\n")
    timeLine(path_time_log, "Fin du calcul de l'indicateur Height of Roughness Elements : ")

    return
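# Illustrative note (added; not called anywhere in the pipeline): the mean_h
# expression in hre_maille_bis, (sum(volume)/count) / (sum(surface)/count),
# algebraically reduces to sum(area*height) / sum(area), i.e. an area-weighted
# mean building height per grid cell. A minimal pure-Python sketch of that
# equivalence; the function name and the building pieces are made up:
def _mean_h_sketch():
    # (area m2, height m) of hypothetical building pieces clipped to one grid cell
    pieces = [(120.0, 9.0), (45.0, 6.0), (300.0, 15.0)]
    surfaces = [area for area, _ in pieces]
    volumes = [area * height for area, height in pieces]
    n = len(pieces)
    mean_h_sql = (sum(volumes) / n) / (sum(surfaces) / n)  # as written in the SQL
    mean_h_weighted = sum(volumes) / sum(surfaces)         # simplified, equivalent form
    assert abs(mean_h_sql - mean_h_weighted) < 1e-9
    return mean_h_weighted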
def createMacroSamples(image_input, vector_to_cut_input, vector_sample_output, raster_sample_output, bd_vector_input_list, bd_buff_list, sql_expression_list, path_time_log, macro_sample_name="", simplify_vector_param=10.0, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True) :

    # Update the log
    starting_event = "createMacroSamples() : create macro samples starting : "
    timeLine(path_time_log, starting_event)

    if debug >= 3:
        print(bold + green + "createMacroSamples() : Variables dans la fonction" + endC)
        print(cyan + "createMacroSamples() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_to_cut_input : " + str(vector_to_cut_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_sample_output : " + str(vector_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "raster_sample_output : " + str(raster_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_vector_input_list : " + str(bd_vector_input_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_buff_list : " + str(bd_buff_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "sql_expression_list : " + str(sql_expression_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMacroSamples() : " + endC + "macro_sample_name : " + str(macro_sample_name) + endC)
        print(cyan + "createMacroSamples() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC)
        print(cyan + "createMacroSamples() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "createMacroSamples() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createMacroSamples() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMacroSamples() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    FOLDER_MASK_TEMP = "Mask_"
    FOLDER_CUTTING_TEMP = "Cut_"
    FOLDER_FILTERING_TEMP = "Filter_"
    FOLDER_BUFF_TEMP = "Buff_"

    SUFFIX_MASK_CRUDE = "_crude"
    SUFFIX_MASK = "_mask"
    SUFFIX_VECTOR_CUT = "_cut"
    SUFFIX_VECTOR_FILTER = "_filt"
    SUFFIX_VECTOR_BUFF = "_buff"

    CODAGE = "uint8"

    # STEP 1 : CLEAN UP EXISTING DATA

    print(cyan + "createMacroSamples() : " + bold + green + "Nettoyage de l'espace de travail..." + endC)

    # Working directory name
    repertory_macrosamples_output = os.path.dirname(vector_sample_output)

    # Check whether the sample files already exist and whether they must be overwritten
    check = os.path.isfile(vector_sample_output) or os.path.isfile(raster_sample_output)

    if check and not overwrite: # If the sample files already exist and overwrite is not enabled
        print(bold + yellow + "File sample : " + vector_sample_output + " already exists and will not be created again." + endC)
    else :
        if check:
            try:
                removeVectorFile(vector_sample_output)
                removeFile(raster_sample_output)
            except Exception:
                pass # If a file cannot be removed, assume it does not exist and move on

        # Temporary directories
        repertory_mask_temp = repertory_macrosamples_output + os.sep + FOLDER_MASK_TEMP + macro_sample_name
        repertory_samples_cutting_temp = repertory_macrosamples_output + os.sep + FOLDER_CUTTING_TEMP + macro_sample_name
        repertory_samples_filtering_temp = repertory_macrosamples_output + os.sep + FOLDER_FILTERING_TEMP + macro_sample_name
        repertory_samples_buff_temp = repertory_macrosamples_output + os.sep + FOLDER_BUFF_TEMP + macro_sample_name

        if debug >= 4:
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_mask_temp))
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_samples_cutting_temp))
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_samples_filtering_temp))
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_samples_buff_temp))

        # Create the temporary directories that do not exist yet
        if not os.path.isdir(repertory_macrosamples_output):
            os.makedirs(repertory_macrosamples_output)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_filtering_temp):
            os.makedirs(repertory_samples_filtering_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Clean the temporary directories that are not empty
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_filtering_temp)
        cleanTempData(repertory_samples_buff_temp)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du nettoyage" + endC)

        # STEP 2 : CUT THE VECTORS

        print(cyan + "createMacroSamples() : " + bold + green + "Decoupage des echantillons ..." + endC)

        if vector_to_cut_input is None :
            # 2.1 : Create the mask bounding the image footprint
            image_name = os.path.splitext(os.path.basename(image_input))[0]
            vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
            createVectorMask(image_input, vector_mask)

            # 2.2 : Simplify the mask
            vector_simple_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
            simplifyVector(vector_mask, vector_simple_mask, simplify_vector_param, format_vector)
        else :
            vector_simple_mask = vector_to_cut_input

        # 2.3 : Cut the exogenous database vectors with the mask
        vectors_cut_list = []
        for vector_input in bd_vector_input_list :
            vector_name = os.path.splitext(os.path.basename(vector_input))[0]
            vector_cut = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension_vector
            vectors_cut_list.append(vector_cut)
        cutoutVectors(vector_simple_mask, bd_vector_input_list, vectors_cut_list, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du decoupage" + endC)

        # STEP 3 : FILTER THE VECTORS

        print(cyan + "createMacroSamples() : " + bold + green + "Filtrage des echantillons ..." + endC)

        vectors_filtered_list = []
        if sql_expression_list != [] :
            for idx_vector in range(len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                vector_cut = vectors_cut_list[idx_vector]
                if idx_vector < len(sql_expression_list) :
                    sql_expression = sql_expression_list[idx_vector]
                else :
                    sql_expression = ""
                vector_filtered = repertory_samples_filtering_temp + os.sep + vector_name + SUFFIX_VECTOR_FILTER + extension_vector
                vectors_filtered_list.append(vector_filtered)

                # Filtering with ogr2ogr
                if sql_expression != "":
                    names_attribut_list = getAttributeNameList(vector_cut, format_vector)
                    column = "'"
                    for name_attribut in names_attribut_list :
                        column += name_attribut + ", "
                    column = column[0:len(column)-2]
                    column += "'"
                    ret = filterSelectDataVector(vector_cut, vector_filtered, column, sql_expression, format_vector)
                    if not ret :
                        print(cyan + "createMacroSamples() : " + bold + yellow + "Attention : problème lors du filtrage des BD vecteurs, l'expression SQL %s est incorrecte" %(sql_expression) + endC)
                        copyVectorFile(vector_cut, vector_filtered)
                else :
                    print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de filtrage sur le fichier du nom : " + endC + vector_filtered)
                    copyVectorFile(vector_cut, vector_filtered)
        else :
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de filtrage demandé" + endC)
            for idx_vector in range(len(bd_vector_input_list)):
                vector_cut = vectors_cut_list[idx_vector]
                vectors_filtered_list.append(vector_cut)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du filtrage" + endC)

        # STEP 4 : BUFFER THE VECTORS

        print(cyan + "createMacroSamples() : " + bold + green + "Mise en place des tampons..." + endC)

        vectors_buffered_list = []
        if bd_buff_list != [] :
            # Iterate over the input vectors
            for idx_vector in range(len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                buff = bd_buff_list[idx_vector]
                vector_filtered = vectors_filtered_list[idx_vector]
                vector_buffered = repertory_samples_buff_temp + os.sep + vector_name + SUFFIX_VECTOR_BUFF + extension_vector

                if buff != 0:
                    if os.path.isfile(vector_filtered):
                        if debug >= 3:
                            print(cyan + "createMacroSamples() : " + endC + "vector_filtered : " + str(vector_filtered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "vector_buffered : " + str(vector_buffered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "buff : " + str(buff) + endC)
                        bufferVector(vector_filtered, vector_buffered, buff, "", 1.0, 10, format_vector)
                    else :
                        print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de fichier du nom : " + endC + vector_filtered)
                else :
                    print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de tampon sur le fichier du nom : " + endC + vector_filtered)
                    copyVectorFile(vector_filtered, vector_buffered)

                vectors_buffered_list.append(vector_buffered)
        else :
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de tampon demandé" + endC)
            for idx_vector in range(len(bd_vector_input_list)):
                vector_filtered = vectors_filtered_list[idx_vector]
                vectors_buffered_list.append(vector_filtered)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin de la mise en place des tampons" + endC)

        # STEP 5 : MERGE THE SHAPEFILES

        print(cyan + "createMacroSamples() : " + bold + green + "Fusion par macroclasse ..." + endC)

        # If there is nothing to merge
        if not vectors_buffered_list:
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de fusion sans donnee à fusionner" + endC)
        # If there is a single input shapefile
        elif len(vectors_buffered_list) == 1:
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de fusion pour une seule donnee à fusionner" + endC)
            copyVectorFile(vectors_buffered_list[0], vector_sample_output)
        else :
            # Merge the shapefiles, keeping only non-empty polygon layers
            vectors_buffered_controled_list = []
            for vector_buffered in vectors_buffered_list :
                if os.path.isfile(vector_buffered) and (getGeometryType(vector_buffered, format_vector) in ('POLYGON', 'MULTIPOLYGON')) and (getNumberFeature(vector_buffered, format_vector) > 0):
                    vectors_buffered_controled_list.append(vector_buffered)
                else :
                    print(cyan + "createMacroSamples() : " + bold + red + "Attention : le fichier bufferisé est vide, il ne sera pas fusionné : " + endC + vector_buffered, file=sys.stderr)

            fusionVectors(vectors_buffered_controled_list, vector_sample_output, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin de la fusion" + endC)

        # STEP 6 : CREATE THE OUTPUT RASTER FILE, IF REQUESTED

        # Create a binary mask
        if raster_sample_output != "" and image_input != "" :
            repertory_output = os.path.dirname(raster_sample_output)
            if not os.path.isdir(repertory_output):
                os.makedirs(repertory_output)
            rasterizeBinaryVector(vector_sample_output, image_input, raster_sample_output, 1, CODAGE)

        # STEP 7 : REMOVE USELESS INTERMEDIATE FILES

        # Remove intermediate data
        if not save_results_intermediate:

            # Remove the cutting mask file if it was created here
            if vector_simple_mask != vector_to_cut_input :
                if os.path.isfile(vector_simple_mask) :
                    removeVectorFile(vector_simple_mask)

            # Remove the temporary directories
            deleteDir(repertory_mask_temp)
            deleteDir(repertory_samples_cutting_temp)
            deleteDir(repertory_samples_filtering_temp)
            deleteDir(repertory_samples_buff_temp)

    # Update the log
    ending_event = "createMacroSamples() : create macro samples ending : "
    timeLine(path_time_log, ending_event)

    return
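# Hypothetical usage sketch (added; not called anywhere): how a caller might
# build "water" macro-class samples from two exogenous databases, buffering the
# first layer by 5 m and keeping only wide rivers in the second. All paths, the
# attribute name in the SQL expression and the log file are made-up placeholders.
def _createMacroSamples_example():
    createMacroSamples(
        image_input="/tmp/zone/image_satellite.tif",           # hypothetical input image
        vector_to_cut_input=None,                              # None: the cutting mask is derived from the image
        vector_sample_output="/tmp/zone/eau_samples.shp",
        raster_sample_output="/tmp/zone/eau_samples.tif",
        bd_vector_input_list=["/tmp/bd/surfaces_eau.shp", "/tmp/bd/troncons_eau.shp"],
        bd_buff_list=[5.0, 0],                                 # buffer (m) per input layer; 0 = no buffer
        sql_expression_list=["", "LARGEUR > 15"],              # one optional SQL filter per layer
        path_time_log="/tmp/zone/time.log",
        macro_sample_name="eau")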
def selectSamples(image_input_list, sample_image_input, vector_output, table_statistics_output, sampler_strategy, select_ratio_floor, ratio_per_class_dico, name_column, no_data_value, path_time_log, rand_seed=0, ram_otb=0, epsg=2154, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True) :

    # Update the log
    starting_event = "selectSamples() : Select points in raster mask macro input starting : "
    timeLine(path_time_log, starting_event)

    if debug >= 3:
        print(cyan + "selectSamples() : " + endC + "image_input_list : " + str(image_input_list) + endC)
        print(cyan + "selectSamples() : " + endC + "sample_image_input : " + str(sample_image_input) + endC)
        print(cyan + "selectSamples() : " + endC + "vector_output : " + str(vector_output) + endC)
        print(cyan + "selectSamples() : " + endC + "table_statistics_output : " + str(table_statistics_output) + endC)
        print(cyan + "selectSamples() : " + endC + "sampler_strategy : " + str(sampler_strategy) + endC)
        print(cyan + "selectSamples() : " + endC + "select_ratio_floor : " + str(select_ratio_floor) + endC)
        print(cyan + "selectSamples() : " + endC + "ratio_per_class_dico : " + str(ratio_per_class_dico) + endC)
        print(cyan + "selectSamples() : " + endC + "name_column : " + str(name_column) + endC)
        print(cyan + "selectSamples() : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "selectSamples() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "selectSamples() : " + endC + "rand_seed : " + str(rand_seed) + endC)
        print(cyan + "selectSamples() : " + endC + "ram_otb : " + str(ram_otb) + endC)
        print(cyan + "selectSamples() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "selectSamples() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "selectSamples() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "selectSamples() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "selectSamples() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants
    EXT_XML = ".xml"

    SUFFIX_SAMPLE = "_sample"
    SUFFIX_STATISTICS = "_statistics"
    SUFFIX_POINTS = "_points"
    SUFFIX_VALUE = "_value"

    BAND_NAME = "band_"
    COLUMN_CLASS = "class"
    COLUMN_ORIGINFID = "originfid"

    NB_POINTS = "nb_points"
    AVERAGE = "average"
    STANDARD_DEVIATION = "st_dev"

    print(cyan + "selectSamples() : " + bold + green + "DEBUT DE LA SELECTION DE POINTS" + endC)

    # Variables and paths
    repertory_output = os.path.dirname(vector_output)
    filename = os.path.splitext(os.path.basename(vector_output))[0]
    sample_points_output = repertory_output + os.sep + filename + SUFFIX_SAMPLE + extension_vector
    file_statistic_points = repertory_output + os.sep + filename + SUFFIX_STATISTICS + SUFFIX_POINTS + EXT_XML

    if debug >= 3:
        print(cyan + "selectSamples() : " + endC + "file_statistic_points : " + str(file_statistic_points) + endC)

    # 0. EXISTENCE OF THE OUTPUT FILE
    #---------------------------------

    # If the output points vector file already exists and overwrite is not enabled
    check = os.path.isfile(vector_output)
    if check and not overwrite:
        print(bold + yellow + "Samples points already done for file %s and will not be calculated again." %(vector_output) + endC)
    else: # Otherwise, or if checking is disabled: create the sample points file

        # Remove any existing output files
        if check:
            try:
                removeVectorFile(vector_output)
            except Exception:
                pass # If the file cannot be removed, assume it does not exist and move on
        if os.path.isfile(table_statistics_output) :
            try:
                removeFile(table_statistics_output)
            except Exception:
                pass # If the file cannot be removed, assume it does not exist and move on

        # 1. STATISTICS ON THE RASTER SAMPLE IMAGE
        #------------------------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start statistique sur l'image des echantillons rasteur..." + endC)

        id_micro_list = identifyPixelValues(sample_image_input)
        if 0 in id_micro_list :
            id_micro_list.remove(0)

        min_micro_class_nb_points = -1
        min_micro_class_label = 0
        infoStructPointSource_dico = {}

        writeTextFile(file_statistic_points, '<?xml version="1.0" ?>\n')
        appendTextFileCR(file_statistic_points, '<GeneralStatistics>')
        appendTextFileCR(file_statistic_points, '    <Statistic name="pointsPerClassRaw">')

        if debug >= 2:
            print("Nombre de points par micro classe :" + endC)

        for id_micro in id_micro_list :
            nb_pixels = countPixelsOfValue(sample_image_input, id_micro)
            if debug >= 2:
                print("MicroClass : " + str(id_micro) + ", nb_points = " + str(nb_pixels))
            appendTextFileCR(file_statistic_points, '        <StatisticPoints class="%d" value="%d" />' %(id_micro, nb_pixels))

            # Keep track of the smallest micro class
            if min_micro_class_nb_points == -1 or min_micro_class_nb_points > nb_pixels :
                min_micro_class_nb_points = nb_pixels
                min_micro_class_label = id_micro

            infoStructPointSource_dico[id_micro] = StructInfoMicoClass()
            infoStructPointSource_dico[id_micro].label_class = id_micro
            infoStructPointSource_dico[id_micro].nb_points = nb_pixels
            infoStructPointSource_dico[id_micro].info_points_list = []
            del nb_pixels

        if debug >= 2:
            print("MicroClass min points found : " + str(min_micro_class_label) + ", nb_points = " + str(min_micro_class_nb_points))

        appendTextFileCR(file_statistic_points, '    </Statistic>')

        pending_event = cyan + "selectSamples() : " + bold + green + "End statistique sur l'image des echantillons rasteur. " + endC
        if debug >= 3:
            print(pending_event)
        timeLine(path_time_log, pending_event)

        # 2. LOADING THE SAMPLE IMAGE
        #-----------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start chargement de l'image des echantillons..." + endC)

        # Image information
        cols, rows, bands = getGeometryImage(sample_image_input)
        xmin, xmax, ymin, ymax = getEmpriseImage(sample_image_input)
        pixel_width, pixel_height = getPixelWidthXYImage(sample_image_input)
        projection_input = getProjectionImage(sample_image_input)
        if projection_input is None or projection_input == 0 :
            projection_input = epsg
        else :
            projection_input = int(projection_input)

        pixel_width = abs(pixel_width)
        pixel_height = abs(pixel_height)

        # Read the raster data
        raw_data = getRawDataImage(sample_image_input)

        if debug >= 3:
            print("projection = " + str(projection_input))
            print("cols = " + str(cols))
            print("rows = " + str(rows))

        # Build a dico structure holding every non-zero pixel, stored as a flat index (x_col + y_row * cols)
        progress = 0
        pass_prog = False
        for y_row in range(rows) :
            for x_col in range(cols) :
                value_class = raw_data[y_row][x_col]
                if value_class != 0 :
                    infoStructPointSource_dico[value_class].info_points_list.append(x_col + (y_row * cols))

                # Progress bar
                if debug >= 4:
                    if ((float(y_row) / rows) * 100.0 > progress) and not pass_prog :
                        progress += 1
                        pass_prog = True
                        print("Progression => " + str(progress) + "%")
                    if ((float(y_row) / rows) * 100.0 > progress + 1) :
                        pass_prog = False

        del raw_data

        pending_event = cyan + "selectSamples() : " + bold + green + "End chargement de l'image des echantillons. " + endC
        if debug >= 3:
            print(pending_event)
        timeLine(path_time_log, pending_event)

        # 3. SAMPLE POINT SELECTION
        #---------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start selection des points d'echantillon..." + endC)

        appendTextFileCR(file_statistic_points, '    <Statistic name="pointsPerClassSelect">')

        # Make the random.sample function deterministic
        if rand_seed > 0:
            random.seed(rand_seed)

        # For every micro class
        for id_micro in id_micro_list :

            # Depending on the selection strategy
            nb_points_ratio = 0
            while switch(sampler_strategy.lower()):
                if case('all'):
                    # 'all' selection mode: keep every point
                    nb_points_ratio = infoStructPointSource_dico[id_micro].nb_points
                    infoStructPointSource_dico[id_micro].sample_points_list = range(nb_points_ratio)
                    break
                if case('percent'):
                    # 'percent' selection mode: draw a per-macro-class ratio of the points
                    id_macro_class = int(math.floor(id_micro / 100) * 100)
                    select_ratio_class = ratio_per_class_dico[id_macro_class]
                    nb_points_ratio = int(infoStructPointSource_dico[id_micro].nb_points * select_ratio_class / 100)
                    infoStructPointSource_dico[id_micro].sample_points_list = random.sample(range(infoStructPointSource_dico[id_micro].nb_points), nb_points_ratio)
                    break
                if case('mixte'):
                    # 'mixte' selection mode
                    nb_points_ratio = int(infoStructPointSource_dico[id_micro].nb_points * select_ratio_floor / 100)
                    if id_micro == min_micro_class_label :
                        # The smallest micro class is kept in full
                        infoStructPointSource_dico[id_micro].sample_points_list = range(infoStructPointSource_dico[id_micro].nb_points)
                        nb_points_ratio = min_micro_class_nb_points
                    elif nb_points_ratio <= min_micro_class_nb_points :
                        # Micro classes whose selection ratio falls below the size of the smallest class are clamped to that size
                        infoStructPointSource_dico[id_micro].sample_points_list = random.sample(range(infoStructPointSource_dico[id_micro].nb_points), min_micro_class_nb_points)
                        nb_points_ratio = min_micro_class_nb_points
                    else :
                        # Every other micro class: draw at random the number of points given by the ratio
                        infoStructPointSource_dico[id_micro].sample_points_list = random.sample(range(infoStructPointSource_dico[id_micro].nb_points), nb_points_ratio)
                    break
                break

            if debug >= 2:
                print("MicroClass = " + str(id_micro) + ", nb_points_ratio " + str(nb_points_ratio))
            appendTextFileCR(file_statistic_points, '        <StatisticPoints class="%d" value="%d" />' %(id_micro, nb_points_ratio))

        appendTextFileCR(file_statistic_points, '    </Statistic>')
        appendTextFileCR(file_statistic_points, '</GeneralStatistics>')

        pending_event = cyan + "selectSamples() : " + bold + green + "End selection des points d'echantillon. " + endC
        if debug >= 3:
            print(pending_event)
        timeLine(path_time_log, pending_event)

        # 4. PREPARING THE SAMPLE POINTS
        #--------------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start preparation des points d'echantillon..." + endC)

        # Build the points dico
        points_random_value_dico = {}
        index_dico_point = 0
        for micro_class in infoStructPointSource_dico :
            micro_class_struct = infoStructPointSource_dico[micro_class]
            label_class = micro_class_struct.label_class
            point_attr_dico = {name_column:int(label_class), COLUMN_CLASS:int(label_class), COLUMN_ORIGINFID:0}

            for id_point in micro_class_struct.sample_points_list:

                # Compute the point coordinates from the flat pixel index (pixel centre)
                coor_x = float(xmin + (int(micro_class_struct.info_points_list[id_point] % cols) * pixel_width)) + (pixel_width / 2.0)
                coor_y = float(ymax - (int(micro_class_struct.info_points_list[id_point] / cols) * pixel_height)) - (pixel_height / 2.0)
                points_random_value_dico[index_dico_point] = [[coor_x, coor_y], point_attr_dico]
                del coor_x
                del coor_y
                index_dico_point += 1

            del point_attr_dico
        del infoStructPointSource_dico

        pending_event = cyan + "selectSamples() : " + bold + green + "End preparation des points d'echantillon. " + endC
        if debug >= 3:
            print(pending_event)
        timeLine(path_time_log, pending_event)

        # 5. CREATING THE SAMPLE POINTS SHAPEFILE
        #-----------------------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start creation du fichier shape de points d'echantillon..." + endC)

        # Define the attributes of the output file
        attribute_dico = {name_column:ogr.OFTInteger, COLUMN_CLASS:ogr.OFTInteger, COLUMN_ORIGINFID:ogr.OFTInteger}

        # Create the shapefile
        createPointsFromCoordList(attribute_dico, points_random_value_dico, sample_points_output, projection_input, format_vector)
        del attribute_dico
        del points_random_value_dico

        pending_event = cyan + "selectSamples() : " + bold + green + "End creation du fichier shape de points d'echantillon. " + endC
        if debug >= 3:
            print(pending_event)
        timeLine(path_time_log, pending_event)

        # 6. EXTRACTING THE SAMPLE POINT VALUES
        #---------------------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start extraction des points d'echantillon dans l'image..." + endC)

        # Single-image case
        if len(image_input_list) == 1:
            # Extract sample
            image_input = image_input_list[0]
            command = "otbcli_SampleExtraction -in %s -vec %s -outfield prefix -outfield.prefix.name %s -out %s -field %s" %(image_input, sample_points_output, BAND_NAME, vector_output, name_column)
            if ram_otb > 0:
                command += " -ram %d" %(ram_otb)
            if debug >= 3:
                print(command)
            exitCode = os.system(command)
            if exitCode != 0:
                raise NameError(cyan + "selectSamples() : " + bold + red + "An error occurred during otbcli_SampleExtraction command. See error message above." + endC)

        # Several-tile case
        else :
            # Output directory
            repertory_output = os.path.dirname(vector_output)

            # Initialise the multi-threading list and the list of all local sample files
            thread_list = []
            vector_local_output_list = []

            # For each input image, get its footprint to re-cut the training sample vector accordingly
            for image_input in image_input_list :

                # Local footprint files
                file_name = os.path.splitext(os.path.basename(image_input))[0]
                emprise_local_sample = repertory_output + os.sep + file_name + SUFFIX_SAMPLE + extension_vector
                vector_sample_local_output = repertory_output + os.sep + file_name + SUFFIX_VALUE + extension_vector
                vector_local_output_list.append(vector_sample_local_output)

                # Single-threaded version...
                #SampleLocalExtraction(image_input, sample_points_output, emprise_local_sample, vector_sample_local_output, name_column, BAND_NAME, ram_otb, format_vector, extension_vector, save_results_intermediate)

                # Multi-threaded version
                thread = threading.Thread(target=SampleLocalExtraction, args=(image_input, sample_points_output, emprise_local_sample, vector_sample_local_output, name_column, BAND_NAME, ram_otb, format_vector, extension_vector, save_results_intermediate))
                thread.start()
                thread_list.append(thread)

            # Wait for the extraction of the sample points of every image
            try:
                for thread in thread_list:
                    thread.join()
            except:
                print(cyan + "selectSamples() : " + bold + red + "Erreur lors de l'extraction des valeurs d'echantillon : impossible de demarrer le thread" + endC, file=sys.stderr)

            # Merge the per-image point vectors holding the band values
            fusionVectors(vector_local_output_list, vector_output, format_vector)

            # Clean the local sample point vector files
            for vector_sample_local_output in vector_local_output_list :
                removeVectorFile(vector_sample_local_output)

        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "End extraction des points d'echantillon dans l'image." + endC)

        # 7. STATISTICS ON THE VALUES OF THE SELECTED SAMPLE POINTS
        #-----------------------------------------------------------
        if debug >= 3:
            print(cyan + "selectSamples() : " + bold + green + "Start calcul des statistiques sur les valeurs des points d'echantillons selectionnees..." + endC)

        # If the statistics computation is requested (a statistics file path is given)
        if table_statistics_output != "":

            # Fetch the data list
            pending_event = cyan + "selectSamples() : " + bold + green + "En cours calcul des statistiques part1... " + endC
            if debug >= 4:
                print(pending_event)
            timeLine(path_time_log, pending_event)

            attribute_name_dico = {}
            name_field_value_list = []
            names_attribut_list = getAttributeNameList(vector_output, format_vector)
            if debug >= 4:
                print("names_attribut_list = " + str(names_attribut_list))

            attribute_name_dico[name_column] = ogr.OFTInteger
            for name_attribut in names_attribut_list :
                if BAND_NAME in name_attribut :
                    attribute_name_dico[name_attribut] = ogr.OFTReal
                    name_field_value_list.append(name_attribut)

            name_field_value_list.sort()

            res_values_dico = getAttributeValues(vector_output, None, None, attribute_name_dico, format_vector)
            del attribute_name_dico

            # Sort the data by micro class identifier
            pending_event = cyan + "selectSamples() : " + bold + green + "En cours calcul des statistiques part2... " + endC
            if debug >= 4:
                print(pending_event)
            timeLine(path_time_log, pending_event)

            data_value_by_micro_class_dico = {}
            stat_by_micro_class_dico = {}

            # Initialise the nested dico
            for id_micro in id_micro_list :
                data_value_by_micro_class_dico[id_micro] = {}
                stat_by_micro_class_dico[id_micro] = {}
                for name_field_value in res_values_dico :
                    if name_field_value != name_column :
                        data_value_by_micro_class_dico[id_micro][name_field_value] = []
                        stat_by_micro_class_dico[id_micro][name_field_value] = {}
                        stat_by_micro_class_dico[id_micro][name_field_value][AVERAGE] = 0.0
                        stat_by_micro_class_dico[id_micro][name_field_value][STANDARD_DEVIATION] = 0.0

            # Sort the values
            pending_event = cyan + "selectSamples() : " + bold + green + "En cours calcul des statistiques part3... " + endC
            if debug >= 4:
                print(pending_event)
            timeLine(path_time_log, pending_event)

            for index in range(len(res_values_dico[name_column])) :
                id_micro = res_values_dico[name_column][index]
                for name_field_value in name_field_value_list :
                    data_value_by_micro_class_dico[id_micro][name_field_value].append(res_values_dico[name_field_value][index])
            del res_values_dico

            # Compute the statistics
            pending_event = cyan + "selectSamples() : " + bold + green + "En cours calcul des statistiques part4... " + endC
            if debug >= 4:
                print(pending_event)
            timeLine(path_time_log, pending_event)

            for id_micro in id_micro_list :
                for name_field_value in name_field_value_list :
                    try :
                        stat_by_micro_class_dico[id_micro][name_field_value][AVERAGE] = average(data_value_by_micro_class_dico[id_micro][name_field_value])
                    except:
                        stat_by_micro_class_dico[id_micro][name_field_value][AVERAGE] = 0
                    try :
                        stat_by_micro_class_dico[id_micro][name_field_value][STANDARD_DEVIATION] = standardDeviation(data_value_by_micro_class_dico[id_micro][name_field_value])
                    except:
                        stat_by_micro_class_dico[id_micro][name_field_value][STANDARD_DEVIATION] = 0
                    try :
                        stat_by_micro_class_dico[id_micro][name_field_value][NB_POINTS] = len(data_value_by_micro_class_dico[id_micro][name_field_value])
                    except:
                        stat_by_micro_class_dico[id_micro][name_field_value][NB_POINTS] = 0

            del data_value_by_micro_class_dico

            # Create the .csv statistics file
            pending_event = cyan + "selectSamples() : " + bold + green + "En cours calcul des statistiques part5... " + endC
            if debug >= 4:
                print(pending_event)
            timeLine(path_time_log, pending_event)

            text_csv = " Micro classes ; Champs couche image ; Nombre de points ; Moyenne ; Ecart type \n"
            writeTextFile(table_statistics_output, text_csv)
            for id_micro in id_micro_list :
                for name_field_value in name_field_value_list :
                    # Write one line per (micro class, field)
                    text_csv = " %d " %(id_micro)
                    text_csv += " ; %s" %(name_field_value)
                    text_csv += " ; %d" %(stat_by_micro_class_dico[id_micro][name_field_value][NB_POINTS])
                    text_csv += " ; %f" %(stat_by_micro_class_dico[id_micro][name_field_value][AVERAGE])
                    text_csv += " ; %f" %(stat_by_micro_class_dico[id_micro][name_field_value][STANDARD_DEVIATION])
                    appendTextFileCR(table_statistics_output, text_csv)
            del name_field_value_list
        else :
            if debug >= 3:
                print(cyan + "selectSamples() : " + bold + green + "Pas de calcul des statistiques sur les valeurs des points demandé !" + endC)

        del id_micro_list

        pending_event = cyan + "selectSamples() : " + bold + green + "End calcul des statistiques sur les valeurs des points d'echantillons selectionnees. " + endC
        if debug >= 3:
            print(pending_event)
        timeLine(path_time_log, pending_event)

        # 8. REMOVING INTERMEDIATE FILES
        #--------------------------------
        if not save_results_intermediate:
            if os.path.isfile(sample_points_output) :
                removeVectorFile(sample_points_output)

    print(cyan + "selectSamples() : " + bold + green + "FIN DE LA SELECTION DE POINTS" + endC)

    # Update the log
    ending_event = "selectSamples() : Select points in raster mask macro input ending : "
    timeLine(path_time_log, ending_event)

    return
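# Illustrative note (added; not called anywhere): selectSamples() stores each
# sample pixel as a flat index (x_col + y_row * cols) and step 4 converts it
# back to map coordinates at the pixel centre. A minimal standalone sketch of
# that round trip, with made-up georeferencing values (all numbers hypothetical):
def _flat_index_to_coordinates_sketch():
    cols = 1000                       # image width in pixels
    xmin, ymax = 650000.0, 6860000.0  # hypothetical upper-left corner (EPSG:2154, metres)
    pixel_width = pixel_height = 2.0  # hypothetical pixel size in metres
    flat_index = 3050                 # encodes x_col = 50, y_row = 3
    x_col = flat_index % cols
    y_row = flat_index // cols
    coor_x = xmin + (x_col * pixel_width) + (pixel_width / 2.0)
    coor_y = ymax - (y_row * pixel_height) - (pixel_height / 2.0)
    return coor_x, coor_y             # (650101.0, 6859993.0)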