def addDataBaseExo(image_input, image_classif_add_output, class_file_dico, class_buffer_dico, class_sql_dico, path_time_log, format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True, simplifie_param=10.0):
    """
    Burn exogenous vector databases into a classification raster.

    For each macro-class, the associated vector files are optionally SQL-filtered,
    cut to the input image footprint, buffered (fixed value or per-feature column),
    rasterized with the macro-class label and merged into one raster per class.
    All class rasters are finally merged with the input classification to produce
    image_classif_add_output.

    Parameters:
        image_input : path to the input classification raster.
        image_classif_add_output : path to the output classification raster.
        class_file_dico : dict {macroclass_label: [vector file paths]}.
        class_buffer_dico : dict {macroclass_label: [buffer value or attribute column name]}.
        class_sql_dico : dict {macroclass_label: [SQL filter expression or ""]}.
        path_time_log : path to the time-log file.
        format_vector : OGR vector driver name (default 'ESRI Shapefile').
        extension_raster : raster file extension (default ".tif").
        extension_vector : vector file extension (default ".shp").
        save_results_intermediate : keep intermediate files when True.
        overwrite : overwrite an existing output file when True.
        simplifie_param : tolerance used to simplify the image footprint mask.

    Returns:
        None. The result is written to image_classif_add_output.
    """
    # Log the start event
    starting_event = "addDataBaseExo() : Add data base exogene to classification starting : "
    timeLine(path_time_log, starting_event)

    # Trace the input parameters
    if debug >= 3:
        print(bold + green + "Variables dans la fonction" + endC)
        print(cyan + "addDataBaseExo() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "image_classif_add_output : " + str(image_classif_add_output) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "class_file_dico : " + str(class_file_dico) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "class_buffer_dico : " + str(class_buffer_dico) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "class_sql_dico : " + str(class_sql_dico) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "addDataBaseExo() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants: temporary folder prefixes and file-name suffixes
    FOLDER_MASK_TEMP = 'Mask_'
    FOLDER_FILTERING_TEMP = 'Filt_'
    FOLDER_CUTTING_TEMP = 'Cut_'
    FOLDER_BUFF_TEMP = 'Buff_'

    SUFFIX_MASK_CRUDE = '_mcrude'
    SUFFIX_MASK = '_mask'
    SUFFIX_FUSION = '_info'
    SUFFIX_VECTOR_FILTER = "_filt"
    SUFFIX_VECTOR_CUT = '_decoup'
    SUFFIX_VECTOR_BUFF = '_buff'

    CODAGE = "uint16"

    # STEP 1 : CLEAN UP EXISTING DATA
    if debug >= 2:
        print(cyan + "addDataBaseExo() : " + bold + green + "NETTOYAGE ESPACE DE TRAVAIL..." + endC)

    # Base name of the input image
    image_name = os.path.splitext(os.path.basename(image_input))[0]

    # If the output file already exists and overwrite is disabled, do nothing
    check = os.path.isfile(image_classif_add_output)
    if check and not overwrite:
        print(bold + yellow + "addDataBaseExo() : " + endC + image_classif_add_output + " has already added bd exo and will not be added again." + endC)
    else:
        if check:
            try:
                removeFile(image_classif_add_output)  # Try to delete the existing output
            except Exception:
                pass  # If the file cannot be deleted, assume it does not exist and continue

        # Define the temporary working directories
        repertory_output = os.path.dirname(image_classif_add_output)
        repertory_mask_temp = repertory_output + os.sep + FOLDER_MASK_TEMP + image_name
        repertory_samples_filtering_temp = repertory_output + os.sep + FOLDER_FILTERING_TEMP + image_name
        repertory_samples_cutting_temp = repertory_output + os.sep + FOLDER_CUTTING_TEMP + image_name
        repertory_samples_buff_temp = repertory_output + os.sep + FOLDER_BUFF_TEMP + image_name

        if debug >= 4:
            print(repertory_mask_temp)
            print(repertory_samples_filtering_temp)
            print(repertory_samples_cutting_temp)
            print(repertory_samples_buff_temp)

        # Create the temporary directories if they do not exist
        if not os.path.isdir(repertory_output):
            os.makedirs(repertory_output)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_filtering_temp):
            os.makedirs(repertory_samples_filtering_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Empty the temporary directories if they are not already empty
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_filtering_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_buff_temp)

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "... FIN NETTOYAGE" + endC)

        # STEP 2 : BUILD A CUTTING SHAPE
        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "SHAPE DE DECOUPE..." + endC)

        # 2.1 : Create the mask delimiting the image footprint
        vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
        createVectorMask(image_input, vector_mask)

        # 2.2 : Simplify the global mask
        vector_simple_mask_cut = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
        simplifyVector(vector_mask, vector_simple_mask_cut, simplifie_param, format_vector)

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "...FIN SHAPE DE DECOUPEE" + endC)

        # STEP 3 : CUT AND BUFFER THE VECTORS, THEN MERGE THEM
        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "MISE EN PLACE DES TAMPONS..." + endC)

        image_combined_list = []
        # Walk the dictionary mapping macro-classes to their vector files
        for macroclass_label in class_file_dico:
            vector_fusion_list = []
            for index_info in range(len(class_file_dico[macroclass_label])):
                input_vector = class_file_dico[macroclass_label][index_info]
                vector_name = os.path.splitext(os.path.basename(input_vector))[0]
                output_vector_filtered = repertory_samples_filtering_temp + os.sep + vector_name + SUFFIX_VECTOR_FILTER + extension_vector
                output_vector_cut = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension_vector
                output_vector_buff = repertory_samples_buff_temp + os.sep + vector_name + SUFFIX_VECTOR_BUFF + extension_vector
                sql_expression = class_sql_dico[macroclass_label][index_info]
                buffer_str = class_buffer_dico[macroclass_label][index_info]

                # The buffer entry is either a numeric value or an attribute column name.
                # Narrowed from a bare "except:": only a failed conversion means "column name".
                buff = 0.0
                col_name_buf = ""
                try:
                    buff = float(buffer_str)
                except (ValueError, TypeError):
                    col_name_buf = buffer_str
                    print(cyan + "addDataBaseExo() : " + bold + green + "Pas de valeur buffer mais un nom de colonne pour les valeur à bufferiser : " + endC + col_name_buf)

                if os.path.isfile(input_vector):
                    if debug >= 3:
                        print(cyan + "addDataBaseExo() : " + endC + "input_vector : " + str(input_vector) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "output_vector_filtered : " + str(output_vector_filtered) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "output_vector_cut : " + str(output_vector_cut) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "output_vector_buff : " + str(output_vector_buff) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "buff : " + str(buff) + endC)
                        print(cyan + "addDataBaseExo() : " + endC + "sql : " + str(sql_expression) + endC)

                    # 3.0 : Filter the input vector with the SQL request (via ogr2ogr)
                    if sql_expression != "":
                        names_attribut_list = getAttributeNameList(input_vector, format_vector)
                        # Build the quoted comma-separated column list expected by filterSelectDataVector
                        column = "'"
                        for name_attribut in names_attribut_list:
                            column += name_attribut + ", "
                        column = column[0:len(column) - 2]  # drop the trailing ", "
                        column += "'"
                        ret = filterSelectDataVector(input_vector, output_vector_filtered, column, sql_expression, format_vector)
                        if not ret:
                            print(cyan + "addDataBaseExo() : " + bold + yellow + "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte" % (sql_expression) + endC)
                            output_vector_filtered = input_vector
                    else:
                        print(cyan + "addDataBaseExo() : " + bold + green + "Pas de filtrage sur le fichier du nom : " + endC + output_vector_filtered)
                        output_vector_filtered = input_vector

                    # 3.1 : Cut the vector to the input image footprint
                    cutoutVectors(vector_simple_mask_cut, [output_vector_filtered], [output_vector_cut], format_vector)

                    # 3.2 : Buffer the cut vector with the fixed value, or with the
                    # per-feature column when a column name was given in the dico
                    if os.path.isfile(output_vector_cut) and ((buff != 0) or (col_name_buf != "")):
                        bufferVector(output_vector_cut, output_vector_buff, buff, col_name_buf, 1.0, 10, format_vector)
                    else:
                        print(cyan + "addDataBaseExo() : " + bold + green + "Pas de buffer sur le fichier du nom : " + endC + output_vector_cut)
                        output_vector_buff = output_vector_cut

                    # 3.3 : If a result shape exists, add it to the fusion list
                    if os.path.isfile(output_vector_buff):
                        vector_fusion_list.append(output_vector_buff)
                        if debug >= 3:
                            print("file for fusion : " + output_vector_buff)
                    else:
                        print(bold + yellow + "pas de fichiers avec ce nom : " + endC + output_vector_buff)
                else:
                    print(cyan + "addDataBaseExo() : " + bold + yellow + "Pas de fichier du nom : " + endC + input_vector)

            # 3.4 : Merge the transformed shapes of one class, rasterize and label them
            if not vector_fusion_list:
                print(bold + yellow + "Pas de fusion sans donnee a fusionnee" + endC)
            else:
                # Rasterize each shape with the macro-class label
                raster_list = []
                for vector in vector_fusion_list:
                    if debug >= 3:
                        print(cyan + "addDataBaseExo() : " + endC + "Rasterization : " + vector + " label : " + macroclass_label)
                    raster_output = os.path.splitext(vector)[0] + extension_raster
                    rasterizeBinaryVector(vector, image_input, raster_output, macroclass_label, CODAGE)
                    raster_list.append(raster_output)

                if debug >= 3:
                    print(cyan + "addDataBaseExo() : " + endC + "nombre d'images a combiner : " + str(len(raster_list)))

                # Merge the class rasters into a single combined raster
                image_combined = repertory_output + os.sep + image_name + '_' + str(macroclass_label) + SUFFIX_FUSION + extension_raster
                image_combined_list.append(image_combined)
                mergeListRaster(raster_list, image_combined, CODAGE)

        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "FIN DE L AFFECTATION DES TAMPONS" + endC)

        # STEP 4 : MERGE THE CLASSIFIED IMAGE WITH THE EXOGENOUS DATABASES
        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "ASSEMBLAGE..." + endC)

        # Append the classification image to the list of combined rasters
        image_combined_list.append(image_input)

        # Merge every raster with the classification
        mergeListRaster(image_combined_list, image_classif_add_output, CODAGE)
        if debug >= 2:
            print(cyan + "addDataBaseExo() : " + bold + green + "FIN" + endC)

        # STEP 5 : REMOVE USELESS INTERMEDIATE FILES
        if not save_results_intermediate:
            # Never delete the input classification itself
            image_combined_list.remove(image_input)
            for to_delete in image_combined_list:
                removeFile(to_delete)

            # Remove the temporary directories
            deleteDir(repertory_mask_temp)
            deleteDir(repertory_samples_filtering_temp)
            deleteDir(repertory_samples_cutting_temp)
            deleteDir(repertory_samples_buff_temp)

    # Log the end event
    ending_event = "addDataBaseExo() : Add data base exogene to classification ending : "
    timeLine(path_time_log, ending_event)
    return
def estimateQualityMns(image_input, vector_cut_input, vector_sample_input_list, vector_sample_points_input, raster_input_dico, vector_output, no_data_value, path_time_log, format_raster='GTiff', epsg=2154, format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True):
    """
    Estimate the height quality of a DSM (MNS) against reference points.

    Reference heights come either from the terminal points of reference line
    vectors (vector_sample_input_list) or directly from a point vector
    (vector_sample_points_input). For every reference point, the DSM value and
    the values of the extra rasters listed in raster_input_dico are sampled;
    the output is a point vector holding the reference height, the DSM height
    and their difference, plus a CSV copy of its DBF table.

    Parameters:
        image_input : path to the DSM raster to evaluate.
        vector_cut_input : optional study-area vector; when empty, the footprint
            of image_input is used instead.
        vector_sample_input_list : list of reference line vectors (used when no
            point file is given).
        vector_sample_points_input : optional reference point vector.
        raster_input_dico : dict {raster path: [[field name], [thr_min, thr_max]]}
            describing extra rasters to sample and the validity filtering of
            their sampled values.
        vector_output : path of the output point vector.
        no_data_value : nodata value used for the raster cuts.
        path_time_log : path to the time-log file.
        format_raster : GDAL raster driver name (default 'GTiff').
        epsg : EPSG code of the output vector (default 2154).
        format_vector : OGR vector driver name (default 'ESRI Shapefile').
        extension_raster : raster file extension (default ".tif").
        extension_vector : vector file extension (default ".shp").
        save_results_intermediate : keep intermediate files when True.
        overwrite : overwrite an existing output file when True.

    Returns:
        None. Results are written to vector_output and its CSV sibling.

    Raises:
        NameError when a raster cut fails.
    """
    # Log the start event
    starting_event = "estimateQualityMns() : Masks creation starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : CREATE HEIGHT POINTS FILE FROM MNS" + endC)
    print(endC)

    # Trace the input parameters
    if debug >= 2:
        print(bold + green + "estimateQualityMns() : Variables dans la fonction" + endC)
        print(cyan + "estimateQualityMns() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "vector_cut_input : " + str(vector_cut_input) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "vector_sample_input_list : " + str(vector_sample_input_list) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "vector_sample_points_input : " + str(vector_sample_points_input) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "raster_input_dico : " + str(raster_input_dico) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "vector_output : " + str(vector_output) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "no_data_value : " + str(no_data_value))
        print(cyan + "estimateQualityMns() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "estimateQualityMns() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants: extensions, suffixes, attribute names and filtering bounds
    EXT_DBF = '.dbf'
    EXT_CSV = '.csv'

    CODAGE = "uint16"

    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_TEMP = '_temp'
    SUFFIX_CLEAN = '_clean'
    SUFFIX_SAMPLE = '_sample'

    ATTRIBUTE_ID = "ID"
    ATTRIBUTE_Z_INI = "Z_INI"
    ATTRIBUTE_Z_FIN = "Z_FIN"
    ATTRIBUTE_PREC_ALTI = "PREC_ALTI"
    ATTRIBUTE_Z_REF = "Z_Ref"
    ATTRIBUTE_Z_MNS = "Z_Mns"
    ATTRIBUTE_Z_DELTA = "Z_Delta"

    ERODE_EDGE_POINTS = -1.0  # negative buffer used to drop points on the study-area edge

    ERROR_VALUE = -99.0
    ERROR_MIN_VALUE = -9999
    ERROR_MAX_VALUE = 9999

    # STEP 0 : PREPARE THE INTERMEDIATE FILES
    # If the output exists and overwrite is disabled, stop here
    check = os.path.isfile(vector_output)
    if check and not overwrite:
        print(cyan + "estimateQualityMns() : " + bold + yellow + "Create file %s already exist : no actualisation" % (vector_output) + endC)
        return

    # Remove a stale CSV sibling of the output if present
    if os.path.isfile(os.path.splitext(vector_output)[0] + EXT_CSV):
        removeFile(os.path.splitext(vector_output)[0] + EXT_CSV)

    repertory_output = os.path.dirname(vector_output)
    base_name = os.path.splitext(os.path.basename(vector_output))[0]

    vector_output_temp = repertory_output + os.sep + base_name + SUFFIX_TEMP + extension_vector
    raster_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_raster
    vector_study = repertory_output + os.sep + base_name + SUFFIX_STUDY + extension_vector
    vector_study_clean = repertory_output + os.sep + base_name + SUFFIX_STUDY + SUFFIX_CLEAN + extension_vector
    image_cut = repertory_output + os.sep + base_name + SUFFIX_CUT + extension_raster
    vector_sample_temp = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + extension_vector
    vector_sample_temp_clean = repertory_output + os.sep + base_name + SUFFIX_SAMPLE + SUFFIX_TEMP + SUFFIX_CLEAN + extension_vector

    # External raster data: one cut file per input raster
    raster_cut_dico = {}
    for raster_input in raster_input_dico:
        base_name_raster = os.path.splitext(os.path.basename(raster_input))[0]
        raster_cut = repertory_output + os.sep + base_name_raster + SUFFIX_CUT + extension_raster
        raster_cut_dico[raster_input] = raster_cut
        if os.path.exists(raster_cut):
            removeFile(raster_cut)

    # Fix: always defined, even when the point-file branch is taken below
    # (it is referenced unconditionally in the STEP 8 cleanup).
    vector_sample_input_cut_list = []

    # STEP 1 : DEFINE THE STUDY-AREA SHAPE
    if (not vector_cut_input is None) and (vector_cut_input != "") and (os.path.isfile(vector_cut_input)):
        cutting_action = True
        vector_study = vector_cut_input
    else:
        cutting_action = False
        createVectorMask(image_input, vector_study)

    # STEP 2 : CUT THE RASTER BY THE STUDY VECTOR AND RESAMPLE IF NEEDED
    # Pixel sizes of the reference DSM. Fix: hoisted out of the cutting branch,
    # because the raster-cut loop below needs them in both cases.
    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_input)

    if cutting_action:
        # Remove a stale output first
        if os.path.exists(image_cut):
            removeFile(image_cut)
        # Cut the DSM to the study area
        if not cutImageByVector(vector_study, image_input, image_cut, pixel_size_x, pixel_size_y, no_data_value, 0, format_raster, format_vector):
            print(cyan + "estimateQualityMns() : " + bold + red + "Une erreur c'est produite au cours du decoupage de l'image : " + image_input + endC, file=sys.stderr)
            # Fix: a bare "raise" here had no active exception; raise explicitly,
            # consistent with the raster error path below
            raise NameError(cyan + "estimateQualityMns() : " + bold + red + "Une erreur c'est produite au cours du decoupage de l'image : " + image_input + endC)
        if debug >= 2:
            print(cyan + "estimateQualityMns() : " + bold + green + "DECOUPAGE DU RASTER %s AVEC LE VECTEUR %s" % (image_input, vector_study) + endC)
    else:
        image_cut = image_input  # the DSM already defines the reference footprint

    # Cut every extra raster of the dictionary to the study area
    for raster_input in raster_input_dico:
        raster_cut = raster_cut_dico[raster_input]
        if not cutImageByVector(vector_study, raster_input, raster_cut, pixel_size_x, pixel_size_y, no_data_value, 0, format_raster, format_vector):
            raise NameError(cyan + "estimateQualityMns() : " + bold + red + "Une erreur c'est produite au cours du decoupage du raster : " + raster_input + endC)

    # Geometry of the working image
    pixel_size_x, pixel_size_y = getPixelWidthXYImage(image_cut)
    cols, rows, bands = getGeometryImage(image_cut)
    xmin, xmax, ymin, ymax = getEmpriseImage(image_cut)

    if debug >= 3:
        print("Geometrie Image : ")
        print(" cols = " + str(cols))
        print(" rows = " + str(rows))
        print(" xmin = " + str(xmin))
        print(" xmax = " + str(xmax))
        print(" ymin = " + str(ymin))
        print(" ymax = " + str(ymax))
        print(" pixel_size_x = " + str(pixel_size_x))
        print(" pixel_size_y = " + str(pixel_size_y))
        print("\n")

    # Dictionary of point coordinates in the cartographic system
    points_random_value_dico = {}
    # List of point coordinates in raw image-matrix space
    points_coordonnees_image_list = []

    # Either rebuild sample points from line-vector endpoints, or read them directly
    if (vector_sample_points_input is None) or (vector_sample_points_input == ""):

        # STEP 3 : CUT THE REFERENCE VECTORS BY THE STUDY AREA, MERGE THEM AND
        # READ THE TERMINAL POINT COORDINATES AND HEIGHTS OF THE LINES

        # Cut the reference vectors with the study-area vector
        for vector_sample in vector_sample_input_list:
            vector_name = os.path.splitext(os.path.basename(vector_sample))[0]
            vector_sample_cut = repertory_output + os.sep + vector_name + SUFFIX_CUT + extension_vector
            vector_sample_input_cut_list.append(vector_sample_cut)
        cutoutVectors(vector_study, vector_sample_input_list, vector_sample_input_cut_list, format_vector)

        # Merge the cut reference vectors
        fusionVectors(vector_sample_input_cut_list, vector_sample_temp, format_vector)

        # Columns to read at each line end
        names_column_start_point_list = [ATTRIBUTE_ID, ATTRIBUTE_Z_INI, ATTRIBUTE_PREC_ALTI]
        names_column_end_point_list = [ATTRIBUTE_ID, ATTRIBUTE_Z_FIN, ATTRIBUTE_PREC_ALTI]
        fields_list = [ATTRIBUTE_ID, ATTRIBUTE_PREC_ALTI, ATTRIBUTE_Z_INI, ATTRIBUTE_Z_FIN]

        # Explode multi-geometries so each line has simple terminal points
        multigeometries2geometries(vector_sample_temp, vector_sample_temp_clean, fields_list, "MULTILINESTRING", format_vector)
        points_coordinates_dico = readVectorFileLinesExtractTeminalsPoints(vector_sample_temp_clean, names_column_start_point_list, names_column_end_point_list, format_vector)

    else:
        # STEP 3_BIS : CUT THE SAMPLE-POINT VECTOR BY THE STUDY AREA AND READ
        # THE SAMPLE COORDINATES DIRECTLY FROM THE POINT FILE
        cutVectorAll(vector_study, vector_sample_points_input, vector_sample_temp, format_vector)
        points_coordinates_dico = readVectorFilePoints(vector_sample_temp, format_vector)

    # STEP 4 : PREPARE THE POINT VECTOR
    for index_key in points_coordinates_dico:
        # Cartographic coordinates and attributes of the point
        coord_info_list = points_coordinates_dico[index_key]
        coor_x = coord_info_list[0]
        coor_y = coord_info_list[1]
        attribut_dico = coord_info_list[2]

        # Point position in image-matrix space, clamped to the image bounds
        pos_x = int(round((coor_x - xmin) / abs(pixel_size_x)) - 1)
        pos_y = int(round((ymax - coor_y) / abs(pixel_size_y)) - 1)
        if pos_x < 0:
            pos_x = 0
        if pos_x >= cols:
            pos_x = cols - 1
        if pos_y < 0:
            pos_y = 0
        if pos_y >= rows:
            pos_y = rows - 1
        coordonnees_list = [pos_x, pos_y]
        points_coordonnees_image_list.append(coordonnees_list)

        # Reference height: Z_FIN takes precedence over Z_INI when both exist
        value_ref = 0.0
        if ATTRIBUTE_Z_INI in attribut_dico.keys():
            value_ref = float(attribut_dico[ATTRIBUTE_Z_INI])
        if ATTRIBUTE_Z_FIN in attribut_dico.keys():
            value_ref = float(attribut_dico[ATTRIBUTE_Z_FIN])

        precision_alti = 0.0
        if ATTRIBUTE_PREC_ALTI in attribut_dico.keys():
            precision_alti = float(attribut_dico[ATTRIBUTE_PREC_ALTI])

        point_attr_dico = {ATTRIBUTE_ID: index_key, ATTRIBUTE_Z_REF: value_ref, ATTRIBUTE_PREC_ALTI: precision_alti, ATTRIBUTE_Z_MNS: 0.0, ATTRIBUTE_Z_DELTA: 0.0}

        # One extra field per additional raster, filled in STEP 5
        for raster_input in raster_input_dico:
            field_name = raster_input_dico[raster_input][0][0]
            point_attr_dico[field_name] = 0.0

        points_random_value_dico[index_key] = [[coor_x, coor_y], point_attr_dico]

    # STEP 5 : READ THE HEIGHT VALUES FROM THE DSM AND THE OTHER RASTERS
    values_height_list = getPixelsValueListImage(image_cut, points_coordonnees_image_list)
    values_others_dico = {}
    for raster_input in raster_input_dico:
        raster_cut = raster_cut_dico[raster_input]
        values_list = getPixelsValueListImage(raster_cut, points_coordonnees_image_list)
        values_others_dico[raster_input] = values_list

    # NOTE(review): this indexing assumes points_random_value_dico is keyed with
    # consecutive integers 0..n-1 (i.e. the readers above return such dicts) —
    # confirm against readVectorFilePoints / readVectorFileLinesExtractTeminalsPoints
    for i in range(len(points_random_value_dico)):
        value_mns = values_height_list[i]
        value_ref = points_random_value_dico[i][1][ATTRIBUTE_Z_REF]
        points_random_value_dico[i][1][ATTRIBUTE_Z_MNS] = float(value_mns)
        precision_alti = points_random_value_dico[i][1][ATTRIBUTE_PREC_ALTI]
        points_random_value_dico[i][1][ATTRIBUTE_PREC_ALTI] = float(precision_alti)
        value_diff = value_ref - value_mns
        points_random_value_dico[i][1][ATTRIBUTE_Z_DELTA] = float(value_diff)

        for raster_input in raster_input_dico:
            field_name = raster_input_dico[raster_input][0][0]
            value_other = values_others_dico[raster_input][i]
            points_random_value_dico[i][1][field_name] = float(value_other)

    # STEP 6 : BUILD A POINT VECTOR WITH COORDINATES, REFERENCE AND DSM HEIGHTS
    # Drop points holding error values or falling outside the raster thresholds
    points_random_value_dico_clean = {}
    for i in range(len(points_random_value_dico)):
        value_ref = points_random_value_dico[i][1][ATTRIBUTE_Z_REF]
        if value_ref != ERROR_VALUE and value_ref > ERROR_MIN_VALUE and value_ref < ERROR_MAX_VALUE:
            points_is_valid = True
            for raster_input in raster_input_dico:
                # A raster entry may carry an optional [min, max] validity range
                if len(raster_input_dico[raster_input]) > 1 and len(raster_input_dico[raster_input][1]) > 1:
                    threshold_min = float(raster_input_dico[raster_input][1][0])
                    threshold_max = float(raster_input_dico[raster_input][1][1])
                    field_name = raster_input_dico[raster_input][0][0]
                    value_raster = float(points_random_value_dico[i][1][field_name])
                    if value_raster < threshold_min or value_raster > threshold_max:
                        points_is_valid = False
            if points_is_valid:
                points_random_value_dico_clean[i] = points_random_value_dico[i]

    # Attribute types of the output file
    attribute_dico = {ATTRIBUTE_ID: ogr.OFTInteger, ATTRIBUTE_PREC_ALTI: ogr.OFTReal, ATTRIBUTE_Z_REF: ogr.OFTReal, ATTRIBUTE_Z_MNS: ogr.OFTReal, ATTRIBUTE_Z_DELTA: ogr.OFTReal}
    for raster_input in raster_input_dico:
        field_name = raster_input_dico[raster_input][0][0]
        attribute_dico[field_name] = ogr.OFTReal

    createPointsFromCoordList(attribute_dico, points_random_value_dico_clean, vector_output_temp, epsg, format_vector)

    # Drop points lying on the study-area edge (negative buffer then cut)
    bufferVector(vector_study, vector_study_clean, ERODE_EDGE_POINTS, "", 1.0, 10, format_vector)
    cutVectorAll(vector_study_clean, vector_output_temp, vector_output, True, format_vector)

    # STEP 7 : CONVERT THE .DBF TABLE TO .CSV
    dbf_file = repertory_output + os.sep + base_name + EXT_DBF
    csv_file = repertory_output + os.sep + base_name + EXT_CSV
    if debug >= 2:
        print(cyan + "estimateQualityMns() : " + bold + green + "Conversion du fichier DBF %s en fichier CSV %s" % (dbf_file, csv_file) + endC)
    convertDbf2Csv(dbf_file, csv_file)

    # STEP 8 : REMOVE USELESS INTERMEDIATE FILES
    if not save_results_intermediate:
        if cutting_action:
            if os.path.isfile(image_cut):
                removeFile(image_cut)
        else:
            if os.path.isfile(vector_study):
                removeVectorFile(vector_study)

        for raster_input in raster_input_dico:
            raster_cut = raster_cut_dico[raster_input]
            if os.path.isfile(raster_cut):
                removeFile(raster_cut)

        if os.path.isfile(vector_output_temp):
            removeVectorFile(vector_output_temp)
        if os.path.isfile(vector_study_clean):
            removeVectorFile(vector_study_clean)
        if os.path.isfile(vector_sample_temp):
            removeVectorFile(vector_sample_temp)
        if os.path.isfile(vector_sample_temp_clean):
            removeVectorFile(vector_sample_temp_clean)
        for vector_file in vector_sample_input_cut_list:
            if os.path.isfile(vector_file):
                removeVectorFile(vector_file)

    print(bold + green + "## END : CREATE HEIGHT POINTS FILE FROM MNSE" + endC)

    # Log the end event
    ending_event = "estimateQualityMns() : Masks creation ending : "
    timeLine(path_time_log, ending_event)
    return
def createMnh(image_mns_input, image_mnt_input, image_threshold_input, vector_emprise_input, image_mnh_output, automatic, bd_road_vector_input_list, bd_road_buff_list, sql_road_expression_list, bd_build_vector_input_list, height_bias, threshold_bd_value, threshold_delta_h, mode_interpolation, method_interpolation, interpolation_bco_radius, simplify_vector_param, epsg, no_data_value, ram_otb, path_time_log, format_raster='GTiff', format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, overwrite=True): # Mise à jour du Log starting_event = "createMnh() : MNH creation starting : " timeLine(path_time_log,starting_event) print(endC) print(bold + green + "## START : MNH CREATION" + endC) print(endC) if debug >= 2: print(bold + green + "createMnh() : Variables dans la fonction" + endC) print(cyan + "createMnh() : " + endC + "image_mns_input : " + str(image_mns_input) + endC) print(cyan + "createMnh() : " + endC + "image_mnt_input : " + str(image_mnt_input) + endC) print(cyan + "createMnh() : " + endC + "image_threshold_input : " + str(image_threshold_input) + endC) print(cyan + "createMnh() : " + endC + "vector_emprise_input : " + str(vector_emprise_input) + endC) print(cyan + "createMnh() : " + endC + "image_mnh_output : " + str(image_mnh_output) + endC) print(cyan + "createMnh() : " + endC + "automatic : " + str(automatic) + endC) print(cyan + "createMnh() : " + endC + "bd_road_vector_input_list : " + str(bd_road_vector_input_list) + endC) print(cyan + "createMnh() : " + endC + "bd_road_buff_list : " + str(bd_road_buff_list) + endC) print(cyan + "createMnh() : " + endC + "sql_road_expression_list : " + str(sql_road_expression_list) + endC) print(cyan + "createMnh() : " + endC + "bd_build_vector_input_list : " + str(bd_build_vector_input_list) + endC) print(cyan + "createMnh() : " + endC + "height_bias : " + str(height_bias) + endC) print(cyan + "createMnh() : " + endC + "threshold_bd_value : " + 
str(threshold_bd_value) + endC) print(cyan + "createMnh() : " + endC + "threshold_delta_h : " + str(threshold_delta_h) + endC) print(cyan + "createMnh() : " + endC + "mode_interpolation : " + str(mode_interpolation) + endC) print(cyan + "createMnh() : " + endC + "method_interpolation : " + str(method_interpolation) + endC) print(cyan + "createMnh() : " + endC + "interpolation_bco_radius : " + str(interpolation_bco_radius) + endC) print(cyan + "createMnh() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC) print(cyan + "createMnh() : " + endC + "epsg : " + str(epsg) + endC) print(cyan + "createMnh() : " + endC + "no_data_value : " + str(no_data_value) + endC) print(cyan + "createMnh() : " + endC + "ram_otb : " + str(ram_otb) + endC) print(cyan + "createMnh() : " + endC + "path_time_log : " + str(path_time_log) + endC) print(cyan + "createMnh() : " + endC + "format_raster : " + str(format_raster) + endC) print(cyan + "createMnh() : " + endC + "format_vector : " + str(format_vector) + endC) print(cyan + "createMnh() : " + endC + "extension_raster : " + str(extension_raster) + endC) print(cyan + "createMnh() : " + endC + "extension_vector : " + str(extension_vector) + endC) print(cyan + "createMnh() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC) print(cyan + "createMnh() : " + endC + "overwrite : " + str(overwrite) + endC) # LES CONSTANTES PRECISION = 0.0000001 CODAGE_8B = "uint8" CODAGE_F = "float" SUFFIX_CUT = "_cut" SUFFIX_CLEAN = "_clean" SUFFIX_SAMPLE = "_sample" SUFFIX_MASK = "_mask" SUFFIX_TMP = "_tmp" SUFFIX_MNS = "_mns" SUFFIX_MNT = "_mnt" SUFFIX_ROAD = "_road" SUFFIX_BUILD = "_build" SUFFIX_RASTER = "_raster" SUFFIX_VECTOR = "_vector" # DEFINIR LES REPERTOIRES ET FICHIERS TEMPORAIRES repertory_output = os.path.dirname(image_mnh_output) basename_mnh = os.path.splitext(os.path.basename(image_mnh_output))[0] sub_repertory_raster_temp = repertory_output + os.sep + basename_mnh + SUFFIX_RASTER + 
SUFFIX_TMP sub_repertory_vector_temp = repertory_output + os.sep + basename_mnh + SUFFIX_VECTOR + SUFFIX_TMP cleanTempData(sub_repertory_raster_temp) cleanTempData(sub_repertory_vector_temp) basename_vector_emprise = os.path.splitext(os.path.basename(vector_emprise_input))[0] basename_mns_input = os.path.splitext(os.path.basename(image_mns_input))[0] basename_mnt_input = os.path.splitext(os.path.basename(image_mnt_input))[0] image_mnh_tmp = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_TMP + extension_raster image_mnh_road = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_ROAD + extension_raster vector_bd_bati_temp = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_BUILD + SUFFIX_TMP + extension_vector vector_bd_bati = repertory_output + os.sep + basename_mnh + SUFFIX_BUILD + extension_vector raster_bd_bati = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_BUILD + extension_raster removeVectorFile(vector_bd_bati) image_emprise_mnt_mask = sub_repertory_raster_temp + os.sep + basename_vector_emprise + SUFFIX_MNT + extension_raster image_mnt_cut = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CUT + extension_raster image_mnt_clean = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CLEAN + extension_raster image_mnt_clean_sample = sub_repertory_raster_temp + os.sep + basename_mnt_input + SUFFIX_CLEAN + SUFFIX_SAMPLE + extension_raster image_emprise_mns_mask = sub_repertory_raster_temp + os.sep + basename_vector_emprise + SUFFIX_MNS + extension_raster image_mns_cut = sub_repertory_raster_temp + os.sep + basename_mns_input + SUFFIX_CUT + extension_raster image_mns_clean = sub_repertory_raster_temp + os.sep + basename_mns_input + SUFFIX_CLEAN + extension_raster vector_bd_road_temp = sub_repertory_vector_temp + os.sep + basename_mnh + SUFFIX_ROAD + SUFFIX_TMP + extension_vector raster_bd_road_mask = sub_repertory_raster_temp + os.sep + basename_mnh + SUFFIX_ROAD + SUFFIX_MASK + extension_raster 
if image_threshold_input != "" : basename_threshold_input = os.path.splitext(os.path.basename(image_threshold_input))[0] image_threshold_cut = sub_repertory_raster_temp + os.sep + basename_threshold_input + SUFFIX_CUT + extension_raster image_threshold_mask = sub_repertory_raster_temp + os.sep + basename_threshold_input + SUFFIX_MASK + extension_raster # VERIFICATION SI LE FICHIER DE SORTIE EXISTE DEJA # Si un fichier de sortie avec le même nom existe déjà, et si l'option ecrasement est à false, alors on ne fait rien check = os.path.isfile(image_mnh_output) if check and not overwrite: print(bold + yellow + "createMnh() : " + endC + "Create mnh %s from %s and %s already done : no actualisation" % (image_mnh_output, image_mns_input, image_mnt_input) + endC) # Si non, ou si la fonction ecrasement est désative, alors on le calcule else: if check: try: # Suppression de l'éventuel fichier existant removeFile(image_mnh_output) except Exception: pass # Si le fichier ne peut pas être supprimé, on suppose qu'il n'existe pas et on passe à la suite # DECOUPAGE DES FICHIERS MS ET MNT D'ENTREE PAR LE FICHIER D'EMPRISE if debug >= 3: print(bold + green + "createMnh() : " + endC + "Decoupage selon l'emprise des fichiers %s et %s " %(image_mns_input, image_mnt_input) + endC) # Fonction de découpe du mns if not cutImageByVector(vector_emprise_input, image_mns_input, image_mns_cut, None, None, no_data_value, epsg, format_raster, format_vector) : raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_mns_input + ". Voir message d'erreur." + endC) # Fonction de découpe du mnt if not cutImageByVector(vector_emprise_input, image_mnt_input, image_mnt_cut, None, None, no_data_value, epsg, format_raster, format_vector) : raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_mnt_input + ". Voir message d'erreur." 
+ endC) if debug >= 3: print(bold + green + "createMnh() : " + endC + "Decoupage des fichiers %s et %s complet" %(image_mns_cut, image_mnt_cut) + endC) # REBOUCHAGE DES TROUS DANS LE MNT D'ENTREE SI NECESSAIRE nodata_mnt = getNodataValueImage(image_mnt_cut) pixelNodataCount = countPixelsOfValue(image_mnt_cut, nodata_mnt) if pixelNodataCount > 0 : if debug >= 3: print(bold + green + "createMnh() : " + endC + "Fill the holes MNT for %s" %(image_mnt_cut) + endC) # Rasterisation du vecteur d'emprise pour creer un masque pour boucher les trous du MNT rasterizeBinaryVector(vector_emprise_input, image_mnt_cut, image_emprise_mnt_mask, 1, CODAGE_8B) # Utilisation de SAGA pour boucher les trous fillNodata(image_mnt_cut, image_emprise_mnt_mask, image_mnt_clean, save_results_intermediate) if debug >= 3: print(bold + green + "createMnh() : " + endC + "Fill the holes MNT to %s completed" %(image_mnt_clean) + endC) else : image_mnt_clean = image_mnt_cut if debug >= 3: print(bold + green + "\ncreateMnh() : " + endC + "Fill the holes not necessary MNT for %s" %(image_mnt_cut) + endC) # REBOUCHAGE DES TROUS DANS LE MNS D'ENTREE SI NECESSAIRE nodata_mns = getNodataValueImage(image_mns_cut) pixelNodataCount = countPixelsOfValue(image_mns_cut, nodata_mns) if pixelNodataCount > 0 : if debug >= 3: print(bold + green + "createMnh() : " + endC + "Fill the holes MNS for %s" %(image_mns_cut) + endC) # Rasterisation du vecteur d'emprise pour creer un masque pour boucher les trous du MNS rasterizeBinaryVector(vector_emprise_input, image_mns_cut, image_emprise_mns_mask, 1, CODAGE_8B) # Utilisation de SAGA pour boucher les trous fillNodata(image_mns_cut, image_emprise_mns_mask, image_mns_clean, save_results_intermediate) if debug >= 3: print(bold + green + "\ncreateMnh() : " + endC + "Fill the holes MNS to %s completed" %(image_mns_clean) + endC) else : image_mns_clean = image_mns_cut if debug >= 3: print(bold + green + "createMnh() : " + endC + "Fill the holes not necessary MNS for %s" 
%(image_mns_cut) + endC) # CALLER LE FICHIER MNT AU FORMAT DU FICHIER MNS # Commande de mise en place de la geométrie re-echantionage command = "otbcli_Superimpose -inr " + image_mns_clean + " -inm " + image_mnt_clean + " -mode " + mode_interpolation + " -interpolator " + method_interpolation + " -out " + image_mnt_clean_sample if method_interpolation.lower() == 'bco' : command += " -interpolator.bco.radius " + str(interpolation_bco_radius) if ram_otb > 0: command += " -ram %d" %(ram_otb) if debug >= 3: print(cyan + "createMnh() : " + bold + green + "Réechantillonage du fichier %s par rapport à la reference %s" %(image_mnt_clean, image_mns_clean) + endC) print(command) exit_code = os.system(command) if exit_code != 0: print(command) raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du superimpose de l'image : " + image_mnt_input + ". Voir message d'erreur." + endC) # INCRUSTATION DANS LE MNH DES DONNEES VECTEURS ROUTES if debug >= 3: print(bold + green + "createMnh() : " + endC + "Use BD road to clean MNH" + endC) # Creation d'un masque de filtrage des donnes routes (exemple : le NDVI) if image_threshold_input != "" : if not cutImageByVector(vector_emprise_input, image_threshold_input, image_threshold_cut, None, None, no_data_value, epsg, format_raster, format_vector) : raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_threshold_input + ". Voir message d'erreur." 
+ endC) createBinaryMask(image_threshold_cut, image_threshold_mask, threshold_bd_value, False, CODAGE_8B) # Execution de la fonction createMacroSamples pour une image correspondant au données routes if bd_road_vector_input_list != [] : createMacroSamples(image_mns_clean, vector_emprise_input, vector_bd_road_temp, raster_bd_road_mask, bd_road_vector_input_list, bd_road_buff_list, sql_road_expression_list, path_time_log, basename_mnh, simplify_vector_param, format_vector, extension_vector, save_results_intermediate, overwrite) if debug >= 3: print(bold + green + "\ncreateMnh() : " + endC + "File raster from BD road is create %s" %(raster_bd_road_mask) + endC) # CALCUL DU MNH # Calcul par bandMath du MNH definir l'expression qui soustrait le MNT au MNS en introduisant le biais et en mettant les valeurs à 0 à une valeur approcher de 0.0000001 delta = "" if height_bias > 0 : delta = "+%s" %(str(height_bias)) elif height_bias < 0 : delta = "-%s" %(str(abs(height_bias))) else : delta = "" # Definition de l'expression if bd_road_vector_input_list != [] : if image_threshold_input != "" : expression = "\"im3b1 > 0 and im4b1 > 0?%s:(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(str(PRECISION), delta, delta, str(PRECISION)) command = "otbcli_BandMath -il %s %s %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, raster_bd_road_mask, image_threshold_mask, image_mnh_tmp, CODAGE_F, expression) else : expression = "\"im3b1 > 0?%s:(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(str(PRECISION), delta, delta, str(PRECISION)) command = "otbcli_BandMath -il %s %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, raster_bd_road_mask, image_mnh_tmp, CODAGE_F, expression) else : expression = "\"(im1b1-im2b1%s) > 0.0?im1b1-im2b1%s:%s\"" %(delta, delta, str(PRECISION)) command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" %(image_mns_clean, image_mnt_clean_sample, image_mnh_tmp, CODAGE_F, expression) if ram_otb > 0: command += " -ram %d" %(ram_otb) if debug >= 3: 
print(cyan + "createMnh() : " + bold + green + "Calcul du MNH %s difference du MNS : %s par le MNT :%s" %(image_mnh_tmp, image_mns_clean, image_mnt_clean_sample) + endC) print(command) exitCode = os.system(command) if exitCode != 0: print(command) raise NameError(cyan + "createMnh() : " + bold + red + "An error occured during otbcli_BandMath command to compute MNH " + image_mnh_tmp + ". See error message above." + endC) # DECOUPAGE DU MNH if bd_build_vector_input_list == []: image_mnh_road = image_mnh_output if debug >= 3: print(bold + green + "createMnh() : " + endC + "Decoupage selon l'emprise du fichier mnh %s " %(image_mnh_tmp) + endC) # Fonction de découpe du mnh if not cutImageByVector(vector_emprise_input, image_mnh_tmp, image_mnh_road, None, None, no_data_value, epsg, format_raster, format_vector) : raise NameError (cyan + "createMnh() : " + bold + red + "!!! Une erreur c'est produite au cours du decoupage de l'image : " + image_mns_input + ". Voir message d'erreur." + endC) if debug >= 3: print(bold + green + "createMnh() : " + endC + "Decoupage du fichier mnh %s complet" %(image_mnh_road) + endC) # INCRUSTATION DANS LE MNH DES DONNEES VECTEURS BATIS # Si demander => liste de fichier vecteur bati passé en donnée d'entrée if bd_build_vector_input_list != []: # Découpage des vecteurs de bd bati exogenes avec l'emprise vectors_build_cut_list = [] for vector_build_input in bd_build_vector_input_list : vector_name = os.path.splitext(os.path.basename(vector_build_input))[0] vector_build_cut = sub_repertory_vector_temp + os.sep + vector_name + SUFFIX_CUT + extension_vector vectors_build_cut_list.append(vector_build_cut) cutoutVectors(vector_emprise_input, bd_build_vector_input_list, vectors_build_cut_list, format_vector) # Fusion des vecteurs batis découpés fusionVectors (vectors_build_cut_list, vector_bd_bati_temp) # Croisement vecteur rasteur entre le vecteur fusion des batis et le MNH créé precedement statisticsVectorRaster(image_mnh_road, vector_bd_bati_temp, 
"", 1, False, False, True, ['PREC_PLANI','PREC_ALTI','ORIGIN_BAT','median','sum','std','unique','range'], [], {}, path_time_log, True, format_vector, save_results_intermediate, overwrite) # Calcul de la colonne delta_H entre les hauteurs des batis et la hauteur moyenne du MNH sous le bati COLUMN_ID = "ID" COLUMN_H_BUILD = "HAUTEUR" COLUMN_H_BUILD_MIN = "Z_MIN" COLUMN_H_BUILD_MAX = "Z_MAX" COLUMN_H_MNH = "mean" COLUMN_H_MNH_MIN = "min" COLUMN_H_MNH_MAX = "max" COLUMN_H_DIFF = "H_diff" field_type = ogr.OFTReal field_value = 0.0 field_width = 20 field_precision = 2 attribute_name_dico = {} attribute_name_dico[COLUMN_ID] = ogr.OFTString attribute_name_dico[COLUMN_H_BUILD] = ogr.OFTReal attribute_name_dico[COLUMN_H_MNH] = ogr.OFTReal # Ajouter la nouvelle colonne H_diff addNewFieldVector(vector_bd_bati_temp, COLUMN_H_DIFF, field_type, field_value, field_width, field_precision, format_vector) # Recuperer les valeur de hauteur du bati et du mnt dans le vecteur data_z_dico = getAttributeValues(vector_bd_bati_temp, None, None, attribute_name_dico, format_vector) # Calculer la difference des Hauteur bati et mnt field_new_values_dico = {} for index in range(len(data_z_dico[COLUMN_ID])) : index_polygon = data_z_dico[COLUMN_ID][index] delta_h = abs(data_z_dico[COLUMN_H_BUILD][index] - data_z_dico[COLUMN_H_MNH][index]) field_new_values_dico[index_polygon] = {COLUMN_H_DIFF:delta_h} # Mettre à jour la colonne H_diff dans le vecteur setAttributeIndexValuesList(vector_bd_bati_temp, COLUMN_ID, field_new_values_dico, format_vector) # Suppression de tous les polygones bati dons la valeur du delat H est inferieur à threshold_delta_h column = "'%s, %s, %s, %s, %s, %s, %s, %s'"% (COLUMN_ID, COLUMN_H_BUILD, COLUMN_H_BUILD_MIN, COLUMN_H_BUILD_MAX, COLUMN_H_MNH, COLUMN_H_MNH_MIN, COLUMN_H_MNH_MAX, COLUMN_H_DIFF) expression = "%s > %s" % (COLUMN_H_DIFF, threshold_delta_h) filterSelectDataVector(vector_bd_bati_temp, vector_bd_bati, column, expression, overwrite, format_vector) # Attention!!!! 
PAUSE pour trie et verification des polygones bati nom deja present dans le MNH ou non if not automatic : print(bold + blue + "Application MnhCreation => " + endC + "Vérification manuelle du vecteur bati %s pour ne concerver que les batis non présent dans le MNH courant %s" %(vector_bd_bati_temp, image_mnh_road) + endC) input(bold + red + "Appuyez sur entree pour continuer le programme..." + endC) # Creation du masque bati avec pour H la hauteur des batiments rasterizeVector(vector_bd_bati, raster_bd_bati, image_mnh_road, COLUMN_H_BUILD) # Fusion du mask des batis et du MNH temporaire expression = "\"im1b1 > 0.0?im1b1:im2b1\"" command = "otbcli_BandMath -il %s %s -out %s %s -exp %s" %(raster_bd_bati, image_mnh_road, image_mnh_output, CODAGE_F, expression) if ram_otb > 0: command += " -ram %d" %(ram_otb) if debug >= 3: print(cyan + "createMnh() : " + bold + green + "Amelioration du MNH %s ajout des hauteurs des batis %s" %(image_mnh_road, raster_bd_bati) + endC) print(command) exitCode = os.system(command) if exitCode != 0: print(command) raise NameError(cyan + "createMnh() : " + bold + red + "An error occured during otbcli_BandMath command to compute MNH Final" + image_mnh_output + ". See error message above." + endC) # SUPPRESIONS FICHIERS INTERMEDIAIRES INUTILES # Suppression des fichiers intermédiaires if not save_results_intermediate : if bd_build_vector_input_list != []: removeFile(image_mnh_road) removeFile(image_threshold_cut) removeFile(image_threshold_mask) removeFile(raster_bd_bati) removeVectorFile(vector_bd_road_temp) removeVectorFile(vector_bd_bati_temp) removeVectorFile(vector_bd_bati) # A confirmer!!! removeFile(raster_bd_road_mask) removeFile(image_mnh_tmp) deleteDir(sub_repertory_raster_temp) deleteDir(sub_repertory_vector_temp) print(endC) print(bold + green + "## END : MNH CREATION" + endC) print(endC) # Mise à jour du Log ending_event = "createMnh() : MNH creation ending : " timeLine(path_time_log,ending_event) return
def createDifference(image_ortho_input, image_mns_input, image_mnt_input, bd_vector_input_list, zone_buffer_dico, departments_list, image_difference_output, vector_difference_output, fileld_bd_raster, simplifie_param, threshold_ndvi, threshold_difference, filter_difference_0, filter_difference_1, path_time_log, format_vector='ESRI Shapefile', extension_raster=".tif", extension_vector=".shp", save_results_intermediate=False, channel_order=['Red', 'Green', 'Blue', 'NIR'], overwrite=True):
    # NOTE(review): channel_order uses a mutable default argument — safe only as long
    # as no callee mutates it in place; confirm before relying on it.
    """
    Build a MNS-difference product for one ortho image.

    Pipeline (as implemented below):
      1. clean/prepare temporary work directories;
      2. build and simplify a mask vector of the image footprint, then cut the
         exogenous BD vectors with it;
      3. buffer the cut vectors per (layer, department) using zone_buffer_dico;
      4. fuse the buffered shapefiles into one zone vector;
      5. rasterize the fused vector (field fileld_bd_raster, float coding);
      6. create a new MNS from the ortho, the MNT and that raster (createMNS);
      7. compute an NDVI image and threshold it into a vegetation mask;
      8. difference input MNS vs new MNS with the NDVI mask applied, then
         threshold the difference into a binary mask and filter it;
      9. polygonize the filtered difference raster and simplify the vector
         into vector_difference_output;
     10. optionally delete the temporary files/directories.

    :param image_ortho_input: input orthophoto raster path.
    :param image_mns_input: input MNS (surface model) raster path.
    :param image_mnt_input: input MNT (terrain model) raster path.
    :param bd_vector_input_list: list of exogenous BD vector files to cut/buffer.
    :param zone_buffer_dico: dict mapping a zone name to a list of
        (layer_name, buffer) pairs; only the first zone key is used.
    :param departments_list: department codes used to build per-dpt file names.
    :param image_difference_output: output difference raster path.
    :param vector_difference_output: output (simplified) difference vector path.
    :param fileld_bd_raster: attribute field burned during rasterization
        (parameter name kept as-is: public interface).
    :param simplifie_param: simplification tolerance for simplifyVector().
    :param threshold_ndvi: threshold for the NDVI vegetation mask.
    :param threshold_difference: threshold for the MNS-difference mask.
    :param filter_difference_0, filter_difference_1: parameters of
        filterBinaryRaster() applied to the difference mask.
    :param path_time_log: log file updated via timeLine().
    :param format_vector, extension_raster, extension_vector: I/O conventions.
    :param save_results_intermediate: keep temporary data when True.
    :param channel_order: ortho band order, forwarded to createNDVI().
    :param overwrite: when False and the output vector exists, do nothing.
    :return: None.
    """
    # Update the log
    starting_event = "createDifference() : create macro samples starting : "
    timeLine(path_time_log, starting_event)

    # Constants
    CODAGE = "float"

    FOLDER_MASK_TEMP = 'Mask_'
    FOLDER_CUTTING_TEMP = 'Cut_'
    FOLDER_BUFF_TEMP = 'Buff_'
    FOLDER_RESULT_TEMP = 'Tmp_'

    SUFFIX_MASK_CRUDE = '_mcrude'
    SUFFIX_MASK = '_mask'
    SUFFIX_FILTERED = '_filtered'
    SUFFIX_VECTOR_CUT = '_decoup'
    SUFFIX_VECTOR_BUFF = '_buff'
    SUFFIX_NEW_MNS = '_new_mns'
    SUFFIX_DIFF_MNS = '_diff_mns'
    SUFFIX_NDVI = '_ndvi'

    # Trace the input parameters
    if debug >= 3:
        print(bold + green + "Variables dans la fonction" + endC)
        print(cyan + "createDifference() : " + endC + "image_ortho_input : " + str(image_ortho_input) + endC)
        print(cyan + "createDifference() : " + endC + "image_mns_input : " + str(image_mns_input) + endC)
        print(cyan + "createDifference() : " + endC + "image_mnt_input : " + str(image_mnt_input) + endC)
        print(cyan + "createDifference() : " + endC + "bd_vector_input_list : " + str(bd_vector_input_list) + endC)
        print(cyan + "createDifference() : " + endC + "zone_buffer_dico : " + str(zone_buffer_dico) + endC)
        print(cyan + "createDifference() : " + endC + "departments_list : " + str(departments_list) + endC)
        print(cyan + "createDifference() : " + endC + "image_difference_output : " + str(image_difference_output) + endC)
        print(cyan + "createDifference() : " + endC + "vector_difference_output : " + str(vector_difference_output) + endC)
        print(cyan + "createDifference() : " + endC + "fileld_bd_raster : " + str(fileld_bd_raster) + endC)
        print(cyan + "createDifference() : " + endC + "simplifie_param : " + str(simplifie_param) + endC)
        print(cyan + "createDifference() : " + endC + "threshold_ndvi : " + str(threshold_ndvi) + endC)
        print(cyan + "createDifference() : " + endC + "threshold_difference : " + str(threshold_difference) + endC)
        print(cyan + "createDifference() : " + endC + "filter_difference_0 : " + str(filter_difference_0) + endC)
        print(cyan + "createDifference() : " + endC + "filter_difference_1 : " + str(filter_difference_1) + endC)
        print(cyan + "createDifference() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createDifference() : " + endC + "channel_order : " + str(channel_order) + endC)
        print(cyan + "createDifference() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "createDifference() : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "createDifference() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createDifference() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createDifference() : " + endC + "overwrite : " + str(overwrite) + endC)

    # STEP 1 : CLEAN EXISTING DATA
    print(cyan + "createDifference() : " + bold + green + "NETTOYAGE ESPACE DE TRAVAIL..." + endC)

    # Base name of the image
    image_name = os.path.splitext(os.path.basename(image_ortho_input))[0]

    # Check whether the difference result file already exists and must be overwritten
    check = os.path.isfile(vector_difference_output)
    if check and not overwrite:
        # The difference file already exists and overwrite is disabled: do nothing
        print(cyan + "createDifference() : " + bold + yellow + "File difference " + vector_difference_output + " already exists and will not be created again." + endC)
    else:
        if check:
            try:
                removeFile(vector_difference_output)
            except Exception:
                # If the file cannot be removed, assume it does not exist and continue
                pass

        # Define the temporary directories
        repertory_output = os.path.dirname(vector_difference_output)
        repertory_output_temp = repertory_output + os.sep + FOLDER_RESULT_TEMP + image_name
        repertory_mask_temp = repertory_output + os.sep + FOLDER_MASK_TEMP + image_name
        repertory_samples_cutting_temp = repertory_output + os.sep + FOLDER_CUTTING_TEMP + image_name
        repertory_samples_buff_temp = repertory_output + os.sep + FOLDER_BUFF_TEMP + image_name

        # NOTE(review): these prints are unconditional here, unlike similar traces
        # elsewhere in the file that are guarded by a debug level — confirm intended.
        print(repertory_output_temp)
        print(repertory_mask_temp)
        print(repertory_samples_cutting_temp)
        print(repertory_samples_buff_temp)

        # Create the temporary directories that do not exist yet
        if not os.path.isdir(repertory_output_temp):
            os.makedirs(repertory_output_temp)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Clean the temporary directories that are not empty
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_buff_temp)
        cleanTempData(repertory_output_temp)

        BD_topo_layers_list = []
        # Python 3: dict views are not subscriptable, hence list() to take the first key
        zone = list(zone_buffer_dico)[0]
        # Build the list of exogenous BD layers used
        for layers_buffer in zone_buffer_dico[zone]:
            BD_topo_layers_list.append(layers_buffer[0])

        print(cyan + "createDifference() : " + bold + green + "... FIN NETTOYAGE" + endC)

        # STEP 2 : CUT THE VECTORS
        print(cyan + "createDifference() : " + bold + green + "DECOUPAGE ECHANTILLONS..." + endC)

        # 2.1 : Create the mask delimiting the zone footprint for the image
        vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
        createVectorMask(image_ortho_input, vector_mask)

        # 2.2 : Simplify the mask
        vector_simple_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
        simplifyVector(vector_mask, vector_simple_mask, simplifie_param, format_vector)

        # 2.3 : Cut the locally copied vectors with the mask
        vector_output_list = []
        for vector_input in bd_vector_input_list:
            vector_name = os.path.splitext(os.path.basename(vector_input))[0]
            extension = os.path.splitext(os.path.basename(vector_input))[1]
            vector_output = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension
            vector_output_list.append(vector_output)
        cutoutVectors(vector_simple_mask, bd_vector_input_list, vector_output_list, format_vector)

        print(cyan + "createDifference() : " + bold + green + "...FIN DECOUPAGE" + endC)

        # STEP 3 : BUFFER THE VECTORS
        print(cyan + "createDifference() : " + bold + green + "MISE EN PLACE DES TAMPONS..." + endC)

        # Walk the dictionary mapping the zone to file names and associated buffers
        for elem_buff in zone_buffer_dico[zone]:
            # Walk the departments
            for dpt in departments_list:
                input_shape = repertory_samples_cutting_temp + os.sep + elem_buff[0] + "_" + dpt + SUFFIX_VECTOR_CUT + extension_vector
                output_shape = repertory_samples_buff_temp + os.sep + elem_buff[0] + "_" + dpt + SUFFIX_VECTOR_BUFF + extension_vector
                buff = elem_buff[1]
                if os.path.isfile(input_shape):
                    if debug >= 3:
                        print(cyan + "createDifference() : " + endC + "input_shape : " + str(input_shape) + endC)
                        print(cyan + "createDifference() : " + endC + "output_shape : " + str(output_shape) + endC)
                        print(cyan + "createDifference() : " + endC + "buff : " + str(buff) + endC)
                    bufferVector(input_shape, output_shape, buff, "", 1.0, 10, format_vector)
                else:
                    print(cyan + "createDifference() : " + bold + yellow + "Pas de fichier du nom : " + endC + input_shape)

        print(cyan + "createDifference() : " + bold + green + "FIN DE L AFFECTATION DES TAMPONS" + endC)

        # STEP 4 : FUSE THE BD TOPO SHAPEFILES
        print(cyan + "createDifference() : " + bold + green + "FUSION DATA BD..." + endC)

        shape_buff_list = []
        # Walk the dictionary mapping the zone to the file name
        for elem_buff in zone_buffer_dico[zone]:
            # Walk the departments
            for dpt in departments_list:
                shape_file = repertory_samples_buff_temp + os.sep + elem_buff[0] + "_" + dpt + SUFFIX_VECTOR_BUFF + extension_vector
                if os.path.isfile(shape_file):
                    shape_buff_list.append(shape_file)
                    print("file for fusion : " + shape_file)
                else:
                    print(bold + yellow + "pas de fichiers avec ce nom : " + endC + shape_file)

        # If a list of shape files exists
        if not shape_buff_list:
            # NOTE(review): when no shapefile was buffered, image_zone_shape is never
            # bound and the rasterization in STEP 5 below raises NameError — confirm
            # that the inputs always yield at least one buffered shapefile.
            print(bold + yellow + "Pas de fusion sans donnee a fusionnee" + endC)
        else:
            # Fuse the shape files
            image_zone_shape = repertory_output_temp + os.sep + image_name + '_' + zone + extension_vector
            fusionVectors(shape_buff_list, image_zone_shape)
            print("File BD : " + image_zone_shape)

        print(cyan + "createDifference() : " + bold + green + "FIN DE LA FUSION" + endC)

        # STEP 5 : RASTERIZE THE FUSED ZONE SHAPEFILE
        print(cyan + "createDifference() : " + bold + green + "RASTERIZATION DE LA FUSION..." + endC)
        image_zone_raster = repertory_output_temp + os.sep + image_name + '_' + zone + extension_raster
        rasterizeVector(image_zone_shape, image_zone_raster, image_ortho_input, fileld_bd_raster, codage=CODAGE)
        print(cyan + "createDifference() : " + bold + green + "FIN DE LA RASTERIZATION" + endC)

        # STEP 6 : CREATE A NEW MNS FROM THE MNT + BD_TOPO DATA
        print(cyan + "createDifference() : " + bold + green + "CREATION NOUVEAU MNS..." + endC)
        image_new_mns_output = repertory_output_temp + os.sep + image_name + SUFFIX_NEW_MNS + extension_raster
        createMNS(image_ortho_input, image_mnt_input, image_zone_raster, image_new_mns_output)
        print(cyan + "createDifference() : " + bold + green + "FIN DE LA CREATION MNS" + endC)

        # STEP 7 : CREATE A MASK OVER VEGETATED AREAS (NDVI)
        print(cyan + "createDifference() : " + bold + green + "CREATION DU NDVI..." + endC)
        image_ndvi_output = repertory_output_temp + os.sep + image_name + SUFFIX_NDVI + extension_raster
        createNDVI(image_ortho_input, image_ndvi_output, channel_order)
        print(cyan + "createDifference() : " + bold + green + "FIN DE LA CREATION DU NDVI" + endC)

        print(cyan + "createDifference() : " + bold + green + "CREATION DU MASQUE NDVI..." + endC)
        image_ndvi_mask_output = repertory_output_temp + os.sep + image_name + SUFFIX_NDVI + SUFFIX_MASK + extension_raster
        createBinaryMask(image_ndvi_output, image_ndvi_mask_output, threshold_ndvi, False)
        print(cyan + "createDifference() : " + bold + green + "FIN DE LA CREATION DU MASQUE NDVI" + endC)

        # STEP 8 : CREATE THE MNS DIFFERENCE FILE, MASKING VEGETATED AREAS
        print(cyan + "createDifference() : " + bold + green + "CREATION DIFFERENCE MNS..." + endC)
        #image_diff_mns_output = repertory_output + os.sep + image_name + SUFFIX_DIFF_MNS + extension_raster
        image_diff_mns_output = image_difference_output
        createDifferenceFile(image_mns_input, image_new_mns_output, image_ndvi_mask_output, image_diff_mns_output)
        print(cyan + "createDifference() : " + bold + green + "FIN DE LA CREATION DE LA DIFFERENCE MNS" + endC)

        print(cyan + "createDifference() : " + bold + green + "CREATION DU MASQUE DE DIFFERENCE..." + endC)
        image_diff_mns_mask_output = repertory_output_temp + os.sep + image_name + SUFFIX_DIFF_MNS + SUFFIX_MASK + extension_raster
        createBinaryMask(image_diff_mns_output, image_diff_mns_mask_output, threshold_difference, True)
        print(cyan + "createDifference() : " + bold + green + "FIN DE LA CREATION DU MASQUE DE DIFFERENCE" + endC)

        print(cyan + "createDifference() : " + bold + green + "FILTRAGE DU MASQUE DE DIFFERENCE..." + endC)
        image_diff_mns_filtered_output = repertory_output_temp + os.sep + image_name + SUFFIX_DIFF_MNS + SUFFIX_FILTERED + extension_raster
        filterBinaryRaster(image_diff_mns_mask_output, image_diff_mns_filtered_output, filter_difference_0, filter_difference_1)
        print(cyan + "createDifference() : " + bold + green + "FIN DU FILTRAGE DU MASQUE DE DIFFERENCE" + endC)

        # STEP 9 : VECTORIZE (polygonize) THE MNS DIFFERENCE RASTER
        print(cyan + "createDifference() : " + bold + green + "VECTORISATION DU RASTER DE DIFFERENCE..." + endC)
        vector_diff_mns_filtered_output = repertory_output_temp + os.sep + image_name + SUFFIX_DIFF_MNS + SUFFIX_FILTERED + extension_vector
        polygonizeRaster(image_diff_mns_filtered_output, vector_diff_mns_filtered_output, image_name, field_name="DN")
        print(cyan + "createDifference() : " + bold + green + "FIN DE VECTORISATION DU RASTER DE DIFFERENCE" + endC)

        print(cyan + "createDifference() : " + bold + green + "SIMPLIFICATION VECTEUR DE DIFFERENCE..." + endC)
        simplifyVector(vector_diff_mns_filtered_output, vector_difference_output, simplifie_param, format_vector)
        print(cyan + "createDifference() : " + bold + green + "FIN DE SIMPLIFICATION DI VECTEUR DE DIFFERENCE" + endC)

        # STEP 10 : DELETE USELESS INTERMEDIATE FILES
        if not save_results_intermediate:
            # Remove the .geom files left in the mask directory
            for to_delete in glob.glob(repertory_mask_temp + os.sep + "*.geom"):
                removeFile(to_delete)

            # Remove the temporary directories
            deleteDir(repertory_mask_temp)
            deleteDir(repertory_samples_cutting_temp)
            deleteDir(repertory_samples_buff_temp)
            deleteDir(repertory_output_temp)

    # Update the log
    ending_event = "createDifference() : create macro samples ending : "
    timeLine(path_time_log, ending_event)
    return
def main(gui=False):
    """
    Command-line entry point of VectorRasterCutting.

    Parses the arguments (lists of input/output rasters and vectors, cutting
    vector, buffer size, pixel rounding, EPSG code, nodata value, resampling
    method, compression flag, raster/vector formats and extensions, log file,
    save/overwrite options, debug level), validates that every input file
    exists, creates the missing output directories, then calls
    cutRasterImages() for the raster list and cutoutVectors() for the vector
    list.

    :param gui: if True the arguments are collected through the IHM dialog
                wrapped by displayIHM(); otherwise from the command line.
    :raises NameError: when an input image, input vector or the cutting vector
                       does not exist on disk.
    """
    # Parser definition (help strings kept verbatim; RawTextHelpFormatter
    # preserves the explicit line breaks of the example below)
    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, prog="VectorRasterCutting", description="\
Info : Cutting list of raster and vector file by vector file. \n\
Objectif : Découper des fichiers raster et vecteurs. \n\
Example : python VectorRasterCutting.py -il /mnt/Data/gilles.fouvet/RA/Haute-Savoie_SansTunnel/haute_savoie_usage_sans_tunnels.tif \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_500m/Global/Resultats/Raster/Haute_Savoie_Couverture_Apres_PT_Directs_et_Indirects.tif \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_200m/Global/Resultats/Raster/Haute_Savoie_Couverture_Apres_PT_Directs_et_Indirects_200m2.tif \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_500m/Global/Resultats/Raster/Haute-Savoie_Couverture_Apres_PT_Directs_et_Indirects_500m2.tif \n\
-vl /mnt/Data/gilles.fouvet/RA/Haute-Savoie_500m/Global/Resultats/Vecteur/Sauvegarde_500m2/Haute-Savoie_Couverture_Apres_PT_Directs_et_Indirects_clnd_500m2.shp \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_200m/Global/Resultats/Vecteur/Sauvegarde_200m2/Haute-Savoie_Couverture_Apres_PT_Directs_et_Indirects_clnd_200m2.shp \n\
-c /mnt/Data/gilles.fouvet/RA/Haute-Savoie/Global/Preparation/Study_Boundaries/DEP74.SHP \n\
-iol /mnt/Data/gilles.fouvet/RA/Haute-Savoie_SansTunnel/haute_savoie_usage_sans_tunnels.tif \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_500m/Global/Resultats/Raster/Haute_Savoie_Couverture_Apres_PT_Directs_et_Indirects_cut.tif \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_200m/Global/Resultats/Raster/Haute_Savoie_Couverture_Apres_PT_Directs_et_Indirects_200m2_cut.tif \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_500m/Global/Resultats/Raster/Haute-Savoie_Couverture_Apres_PT_Directs_et_Indirects_500m2_cut.tif \n\
-vol /mnt/Data/gilles.fouvet/RA/Haute-Savoie_500m/Global/Resultats/Vecteur/Sauvegarde_500m2/Haute-Savoie_Couverture_Apres_PT_Directs_et_Indirects_clnd_500m2_cut.shp \n\
/mnt/Data/gilles.fouvet/RA/Haute-Savoie_200m/Global/Resultats/Vecteur/Sauvegarde_200m2/Haute-Savoie_Couverture_Apres_PT_Directs_et_Indirects_clnd_200m2_cut.shp \n\
-b 10.0 \n\
-r 5.0 \n\
-epsg 2154 \n\
-ndv 65535 \n\
-z \n\
-log /mnt/Data/gilles.fouvet/RA/Haute-Savoie_SansTunnel/FichierHaute-Savoie.log")

    parser.add_argument('-il', '--images_input_list', default="", nargs="+", help="List input images to cut", type=str, required=False)
    parser.add_argument('-vl', '--vectors_input_list', default="", nargs="+", help="List input vectors to cut.", type=str, required=False)
    parser.add_argument('-c', '--vector_cut', default="", help="Vector input contain the vector to cut images and vectors input.", type=str, required=True)
    parser.add_argument('-iol', '--images_output_list', default="", nargs="+", help="List output images cut", type=str, required=False)
    parser.add_argument('-vol', '--vectors_output_list', default="", nargs="+", help="List output vectors to cut.", type=str, required=False)
    parser.add_argument('-b', '--buffer_size', default=0, help="Option : Value of positive buffer in metrer , default : 0 ", type=float, required=False)
    parser.add_argument('-r', '--round_pixel_size', default=0, help="Option : Value of around to dunnage if at 0 the value is read size pixel image (in metre), default : 0 ", type=float, required=False)
    parser.add_argument("-epsg", '--epsg', default=0, help="Option : Projection parameter of data if 0 used projection of raster file", type=int, required=False)
    parser.add_argument("-ndv", '--no_data_value', default=0, help="Option pixel value for raster file to no data, default : 0 ", type=int, required=False)
    parser.add_argument('-rm', '--resampling_methode', default="", help="Option : Define the algo methode uses to resampling. By default : if empty (by default) not used.", type=str, required=False)
    parser.add_argument('-z', '--z_compress', action='store_true', default=False, help="Option : The rasters images cutting and compress are produced, default : False", required=False)
    parser.add_argument('-raf', '--format_raster', default="GTiff", help="Option : Format output image, by default : GTiff (GTiff, HFA...)", type=str, required=False)
    parser.add_argument('-vef', '--format_vector', default="ESRI Shapefile", help="Format of the output file.", type=str, required=False)
    parser.add_argument('-rae', '--extension_raster', default=".tif", help="Option : Extension file for image raster. By default : '.tif'", type=str, required=False)
    parser.add_argument('-vee', '--extension_vector', default=".shp", help="Option : Extension file for vector. By default : '.shp'", type=str, required=False)
    parser.add_argument('-log', '--path_time_log', default="", help="Name of log", type=str, required=False)
    parser.add_argument('-sav', '--save_results_inter', action='store_true', default=False, help="Save or delete intermediate result after the process. By default, False", required=False)
    parser.add_argument('-now', '--overwrite', action='store_false', default=True, help="Overwrite files with same names. By default, True", required=False)
    parser.add_argument('-debug', '--debug', default=3, help="Option : Value of level debug trace, default : 3 ", type=int, required=False)
    args = displayIHM(gui, parser)

    # ARGUMENT RETRIEVAL
    # (each argument is only bound when present, mirroring the historic
    #  behaviour of this chain; 'is not None' is the idiomatic None test)

    # Input images: every listed file must exist
    if args.images_input_list is not None:
        images_input_list = args.images_input_list
        for image_input in images_input_list:
            if image_input != "" and not os.path.isfile(image_input):
                raise NameError(cyan + "VectorRasterCutting : " + bold + red + "File %s not existe!" % (image_input) + endC)

    # Input vectors: every listed file must exist
    if args.vectors_input_list is not None:
        vectors_input_list = args.vectors_input_list
        for vector_input in vectors_input_list:
            if vector_input != "" and not os.path.isfile(vector_input):
                raise NameError(cyan + "VectorRasterCutting : " + bold + red + "File %s not existe!" % (vector_input) + endC)

    # Cutting vector: mandatory and must exist
    if args.vector_cut is not None:
        vector_cut = args.vector_cut
        if not os.path.isfile(vector_cut):
            raise NameError(cyan + "VectorRasterCutting : " + bold + red + "File %s not existe!" % (vector_cut) + endC)

    # Output images
    if args.images_output_list is not None:
        images_output_list = args.images_output_list

    # Output vectors
    if args.vectors_output_list is not None:
        vectors_output_list = args.vectors_output_list

    # Buffer parameter
    if args.buffer_size is not None:
        buffer_size = args.buffer_size

    # Rounding parameter
    if args.round_pixel_size is not None:
        round_pixel_size = args.round_pixel_size

    # Projection parameter
    if args.epsg is not None:
        epsg = args.epsg

    # Nodata parameter
    if args.no_data_value is not None:
        no_data_value = args.no_data_value

    # Resampling method
    if args.resampling_methode is not None:
        resampling_methode = args.resampling_methode

    # Compression option
    if args.z_compress is not None:
        z_compress = args.z_compress

    # Output raster format
    if args.format_raster is not None:
        format_raster = args.format_raster

    # Output vector format
    if args.format_vector is not None:
        format_vector = args.format_vector

    # Raster file extension
    if args.extension_raster is not None:
        extension_raster = args.extension_raster

    # Vector file extension
    if args.extension_vector is not None:
        extension_vector = args.extension_vector

    # Log file name
    if args.path_time_log is not None:
        path_time_log = args.path_time_log

    # Keep-intermediate-files option
    if args.save_results_inter is not None:
        save_results_intermediate = args.save_results_inter

    # Overwrite option
    if args.overwrite is not None:
        overwrite = args.overwrite

    # Debug trace level (module-level, shared with the worker functions)
    if args.debug is not None:
        global debug
        debug = args.debug

    if debug >= 3:
        print(bold + green + "VectorRasterCutting : Variables dans le parser" + endC)
        print(cyan + "VectorRasterCutting : " + endC + "images_input_list : " + str(images_input_list) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "vectors_input_list : " + str(vectors_input_list) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "vector_cut : " + str(vector_cut) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "images_output_list : " + str(images_output_list) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "vectors_output_list : " + str(vectors_output_list) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "round_pixel_size : " + str(round_pixel_size) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "buffer_size : " + str(buffer_size) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "no_data_value : " + str(no_data_value) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "resampling_methode : " + str(resampling_methode) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "z_compress : " + str(z_compress) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "format_raster : " + str(format_raster) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "extension_raster : " + str(extension_raster) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "save_results_inter : " + str(save_results_intermediate) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "overwrite : " + str(overwrite) + endC)
        print(cyan + "VectorRasterCutting : " + endC + "debug : " + str(debug) + endC)

    # EXECUTION
    # Create the output directories when they do not exist yet
    for image_output in images_output_list:
        if not os.path.isdir(os.path.dirname(image_output)):
            os.makedirs(os.path.dirname(image_output))
    for vector_output in vectors_output_list:
        if not os.path.isdir(os.path.dirname(vector_output)):
            os.makedirs(os.path.dirname(vector_output))

    # Run the cutting function on the raster image list
    if len(images_input_list) > 0:
        cutRasterImages(images_input_list, vector_cut, images_output_list, buffer_size, round_pixel_size, epsg, no_data_value, resampling_methode, z_compress, path_time_log, format_raster, format_vector, extension_raster, extension_vector, save_results_intermediate, overwrite)

    # Run the cutting function on the vector list
    if len(vectors_input_list) > 0:
        # Cut every vector file of the list
        cutoutVectors(vector_cut, vectors_input_list, vectors_output_list, overwrite, format_vector)
def createMacroSamples(image_input, vector_to_cut_input, vector_sample_output, raster_sample_output, bd_vector_input_list, bd_buff_list, sql_expression_list, path_time_log, macro_sample_name="", simplify_vector_param=10.0, format_vector='ESRI Shapefile', extension_vector=".shp", save_results_intermediate=False, overwrite=True) :
    """
    Build a macro-class training-sample vector (and optionally a binary raster)
    from a list of exogenous vector databases.

    Pipeline (each step prints its progress):
      1. Clean the workspace: remove previous outputs (if overwrite) and
         (re)create four per-sample temp directories (mask / cut / filter / buff).
      2. Build a cutting mask: either derived from image_input (crude mask then
         simplified with simplify_vector_param) or vector_to_cut_input as-is;
         cut every input vector with it.
      3. Optionally filter each cut vector with the matching SQL expression
         from sql_expression_list (missing entries mean "no filter").
      4. Optionally buffer each filtered vector with the matching distance
         from bd_buff_list (a 0 buffer means plain copy).
      5. Fuse the buffered vectors (skipping missing/empty/non-polygon ones)
         into vector_sample_output.
      6. If raster_sample_output and image_input are given, rasterize the
         fused vector to a binary uint8 raster.
      7. Unless save_results_intermediate, delete the temp directories and
         the generated mask.

    Parameters:
        image_input : reference raster; used to build the mask (step 2) and as
            the rasterization reference (step 6).
        vector_to_cut_input : pre-built cutting vector; when None the mask is
            derived from image_input instead.
        vector_sample_output : output fused sample vector (shapefile path).
        raster_sample_output : output binary raster path, or "" to skip step 6.
        bd_vector_input_list : list of exogenous DB vector files to process.
        bd_buff_list : buffer distance per input vector ([] disables step 4).
            NOTE(review): indexed directly per input vector — a list shorter
            than bd_vector_input_list raises IndexError, unlike
            sql_expression_list which is length-guarded. Confirm callers
            always pass matching lengths.
        sql_expression_list : SQL filter per input vector ([] disables step 3).
        path_time_log : log file updated via timeLine() at start and end.
        macro_sample_name : suffix appended to the temp directory names.
        simplify_vector_param : tolerance passed to simplifyVector() in step 2.
        format_vector : OGR driver name for all vector operations.
        extension_vector : file extension for generated vectors.
        save_results_intermediate : when True, keep temp dirs and mask (step 7 skipped).
        overwrite : when False and an output already exists, steps 1-7 are skipped
            entirely (only the log is updated).

    Returns:
        None.

    Relies on module-level names defined elsewhere in the file: the `debug`
    verbosity global, the ANSI color constants (bold, green, cyan, yellow,
    red, endC), and the project helper functions (timeLine, removeVectorFile,
    removeFile, cleanTempData, createVectorMask, simplifyVector,
    cutoutVectors, getAttributeNameList, filterSelectDataVector,
    copyVectorFile, bufferVector, getGeometryType, getNumberFeature,
    fusionVectors, rasterizeBinaryVector, deleteDir).
    """

    # Update the log with the starting event.
    starting_event = "createMacroSamples() : create macro samples starting : "
    timeLine(path_time_log,starting_event)

    # Trace the input parameters at high verbosity.
    if debug >= 3:
        print(bold + green + "createMacroSamples() : Variables dans la fonction" + endC)
        print(cyan + "createMacroSamples() : " + endC + "image_input : " + str(image_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_to_cut_input : " + str(vector_to_cut_input) + endC)
        print(cyan + "createMacroSamples() : " + endC + "vector_sample_output : " + str(vector_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "raster_sample_output : " + str(raster_sample_output) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_vector_input_list : " + str(bd_vector_input_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "bd_buff_list : " + str(bd_buff_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "sql_expression_list : " + str(sql_expression_list) + endC)
        print(cyan + "createMacroSamples() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "createMacroSamples() : " + endC + "macro_sample_name : " + str(macro_sample_name) + endC)
        print(cyan + "createMacroSamples() : " + endC + "simplify_vector_param : " + str(simplify_vector_param) + endC)
        # NOTE(review): this line is missing the trailing "+ endC" that every
        # sibling print has — the color reset is omitted for this one line.
        print(cyan + "createMacroSamples() : " + endC + "format_vector : " + str(format_vector))
        print(cyan + "createMacroSamples() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "createMacroSamples() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "createMacroSamples() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants: temp-directory prefixes, filename suffixes, raster pixel type.
    FOLDER_MASK_TEMP = "Mask_"
    FOLDER_CUTTING_TEMP = "Cut_"
    FOLDER_FILTERING_TEMP = "Filter_"
    FOLDER_BUFF_TEMP = "Buff_"

    SUFFIX_MASK_CRUDE = "_crude"
    SUFFIX_MASK = "_mask"
    SUFFIX_VECTOR_CUT = "_cut"
    SUFFIX_VECTOR_FILTER = "_filt"
    SUFFIX_VECTOR_BUFF = "_buff"

    CODAGE = "uint8"

    # STEP 1 : CLEAN UP EXISTING DATA
    print(cyan + "createMacroSamples() : " + bold + green + "Nettoyage de l'espace de travail..." + endC)

    # Working directory, derived from the output vector path.
    repertory_macrosamples_output = os.path.dirname(vector_sample_output)

    # Check whether either output already exists and whether it must be overwritten.
    check = os.path.isfile(vector_sample_output) or os.path.isfile(raster_sample_output)

    if check and not overwrite: # Outputs already exist and overwrite is disabled: skip all processing.
        print(bold + yellow + "File sample : " + vector_sample_output + " already exists and will not be created again." + endC)
    else :
        if check:
            try:
                removeVectorFile(vector_sample_output)
                removeFile(raster_sample_output)
            except Exception:
                pass # If a file cannot be removed it is assumed absent; best-effort cleanup.

        # Define the four temporary directories (one per processing step).
        repertory_mask_temp = repertory_macrosamples_output + os.sep + FOLDER_MASK_TEMP + macro_sample_name
        repertory_samples_cutting_temp = repertory_macrosamples_output + os.sep + FOLDER_CUTTING_TEMP + macro_sample_name
        repertory_samples_filtering_temp = repertory_macrosamples_output + os.sep + FOLDER_FILTERING_TEMP + macro_sample_name
        repertory_samples_buff_temp = repertory_macrosamples_output + os.sep + FOLDER_BUFF_TEMP + macro_sample_name

        # NOTE(review): the filtering directory is not traced here, although
        # the three others are — presumably an oversight, harmless.
        if debug >= 4:
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_mask_temp))
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_samples_cutting_temp))
            print(cyan + "createMacroSamples() : " + endC + "Création du répertoire : " + str(repertory_samples_buff_temp))

        # Create the temporary directories that do not exist yet.
        if not os.path.isdir(repertory_macrosamples_output):
            os.makedirs(repertory_macrosamples_output)
        if not os.path.isdir(repertory_mask_temp):
            os.makedirs(repertory_mask_temp)
        if not os.path.isdir(repertory_samples_cutting_temp):
            os.makedirs(repertory_samples_cutting_temp)
        if not os.path.isdir(repertory_samples_filtering_temp):
            os.makedirs(repertory_samples_filtering_temp)
        if not os.path.isdir(repertory_samples_buff_temp):
            os.makedirs(repertory_samples_buff_temp)

        # Empty the temporary directories if they are not already empty.
        cleanTempData(repertory_mask_temp)
        cleanTempData(repertory_samples_cutting_temp)
        cleanTempData(repertory_samples_filtering_temp)
        cleanTempData(repertory_samples_buff_temp)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du nettoyage" + endC)

        # STEP 2 : CUT THE INPUT VECTORS
        print(cyan + "createMacroSamples() : " + bold + green + "Decoupage des echantillons ..." + endC)

        if vector_to_cut_input == None :
            # 2.1 : Create the mask delimiting the image footprint.
            image_name = os.path.splitext(os.path.basename(image_input))[0]
            vector_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK_CRUDE + extension_vector
            createVectorMask(image_input, vector_mask)

            # 2.2 : Simplify the mask geometry.
            vector_simple_mask = repertory_mask_temp + os.sep + image_name + SUFFIX_MASK + extension_vector
            simplifyVector(vector_mask, vector_simple_mask, simplify_vector_param, format_vector)
        else :
            # A cutting vector was supplied: use it directly as the mask.
            vector_simple_mask = vector_to_cut_input

        # 2.3 : Cut every exogenous DB vector with the mask.
        vectors_cut_list = []
        for vector_input in bd_vector_input_list :
            vector_name = os.path.splitext(os.path.basename(vector_input))[0]
            vector_cut = repertory_samples_cutting_temp + os.sep + vector_name + SUFFIX_VECTOR_CUT + extension_vector
            vectors_cut_list.append(vector_cut)
        cutoutVectors(vector_simple_mask, bd_vector_input_list, vectors_cut_list, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du decoupage" + endC)

        # STEP 3 : FILTER THE VECTORS (optional, driven by sql_expression_list)
        print(cyan + "createMacroSamples() : " + bold + green + "Filtrage des echantillons ..." + endC)

        vectors_filtered_list = []
        if sql_expression_list != [] :
            for idx_vector in range (len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                vector_cut = vectors_cut_list[idx_vector]
                # Missing SQL entries (list shorter than the vectors) mean "no filter".
                if idx_vector < len(sql_expression_list) :
                    sql_expression = sql_expression_list[idx_vector]
                else :
                    sql_expression = ""
                vector_filtered = repertory_samples_filtering_temp + os.sep + vector_name + SUFFIX_VECTOR_FILTER + extension_vector
                vectors_filtered_list.append(vector_filtered)

                # Filtering via ogr2ogr
                if sql_expression != "":
                    # Build the quoted, comma-separated column list ("'a, b, c'")
                    # from the attributes of the cut vector.
                    names_attribut_list = getAttributeNameList(vector_cut, format_vector)
                    column = "'"
                    for name_attribut in names_attribut_list :
                        column += name_attribut + ", "
                    column = column[0:len(column)-2] # Drop the trailing ", ".
                    column += "'"
                    ret = filterSelectDataVector(vector_cut, vector_filtered, column, sql_expression, format_vector)
                    if not ret :
                        # Filtering failed (bad SQL): warn and fall back to a plain copy.
                        print(cyan + "createMacroSamples() : " + bold + yellow + "Attention problème lors du filtrage des BD vecteurs l'expression SQL %s est incorrecte" %(sql_expression) + endC)
                        copyVectorFile(vector_cut, vector_filtered)
                else :
                    # No SQL for this vector: plain copy.
                    print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de filtrage sur le fichier du nom : " + endC + vector_filtered)
                    copyVectorFile(vector_cut, vector_filtered)
        else :
            # No filtering requested at all: the cut vectors are used as-is.
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de filtrage demandé" + endC)
            for idx_vector in range (len(bd_vector_input_list)):
                vector_cut = vectors_cut_list[idx_vector]
                vectors_filtered_list.append(vector_cut)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin du filtrage" + endC)

        # STEP 4 : BUFFER THE VECTORS (optional, driven by bd_buff_list)
        print(cyan + "createMacroSamples() : " + bold + green + "Mise en place des tampons..." + endC)

        vectors_buffered_list = []
        if bd_buff_list != [] :
            # Iterate over the input vectors.
            for idx_vector in range (len(bd_vector_input_list)):
                vector_name = os.path.splitext(os.path.basename(bd_vector_input_list[idx_vector]))[0]
                # NOTE(review): unguarded index — raises IndexError if
                # bd_buff_list is shorter than bd_vector_input_list (the SQL
                # list above is length-guarded; this one is not).
                buff = bd_buff_list[idx_vector]
                vector_filtered = vectors_filtered_list[idx_vector]
                vector_buffered = repertory_samples_buff_temp + os.sep + vector_name + SUFFIX_VECTOR_BUFF + extension_vector

                if buff != 0:
                    if os.path.isfile(vector_filtered):
                        if debug >= 3:
                            print(cyan + "createMacroSamples() : " + endC + "vector_filtered : " + str(vector_filtered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "vector_buffered : " + str(vector_buffered) + endC)
                            print(cyan + "createMacroSamples() : " + endC + "buff : " + str(buff) + endC)
                        bufferVector(vector_filtered, vector_buffered, buff, "", 1.0, 10, format_vector)
                    else :
                        # Missing input file: warn. NOTE(review): vector_buffered is
                        # still appended below even though it was never created —
                        # step 5 tolerates this via its os.path.isfile() check.
                        print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de fichier du nom : " + endC + vector_filtered)
                else :
                    # Zero buffer: plain copy instead of buffering.
                    print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de tampon sur le fichier du nom : " + endC + vector_filtered)
                    copyVectorFile(vector_filtered, vector_buffered)

                vectors_buffered_list.append(vector_buffered)
        else :
            # No buffering requested: the filtered vectors are used as-is.
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de tampon demandé" + endC)
            for idx_vector in range (len(bd_vector_input_list)):
                vector_filtered = vectors_filtered_list[idx_vector]
                vectors_buffered_list.append(vector_filtered)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin de la mise en place des tampons" + endC)

        # STEP 5 : FUSE THE SHAPEFILES
        print(cyan + "createMacroSamples() : " + bold + green + "Fusion par macroclasse ..." + endC)

        # Nothing to fuse.
        if not vectors_buffered_list:
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de fusion sans donnee à fusionner" + endC)
        # A single input shapefile: copy instead of fusing.
        elif len(vectors_buffered_list) == 1:
            print(cyan + "createMacroSamples() : " + bold + yellow + "Pas de fusion pour une seule donnee à fusionner" + endC)
            copyVectorFile(vectors_buffered_list[0], vector_sample_output)
        else :
            # Fuse only the files that exist, are polygonal, and are non-empty.
            vectors_buffered_controled_list = []
            for vector_buffered in vectors_buffered_list :
                if os.path.isfile(vector_buffered) and (getGeometryType(vector_buffered, format_vector) in ('POLYGON', 'MULTIPOLYGON')) and (getNumberFeature(vector_buffered, format_vector) > 0):
                    vectors_buffered_controled_list.append(vector_buffered)
                else :
                    print(cyan + "createMacroSamples() : " + bold + red + "Attention fichier bufferisé est vide il ne sera pas fusionné : " + endC + vector_buffered, file=sys.stderr)
            fusionVectors(vectors_buffered_controled_list, vector_sample_output, format_vector)

        print(cyan + "createMacroSamples() : " + bold + green + "... fin de la fusion" + endC)

        # STEP 6 : CREATE THE OUTPUT RASTER IF REQUESTED
        # Rasterize the fused vector to a binary mask over image_input.
        if raster_sample_output != "" and image_input != "" :
            repertory_output = os.path.dirname(raster_sample_output)
            if not os.path.isdir(repertory_output):
                os.makedirs(repertory_output)
            rasterizeBinaryVector(vector_sample_output, image_input, raster_sample_output, 1, CODAGE)

        # STEP 7 : REMOVE USELESS INTERMEDIATE FILES
        if not save_results_intermediate:
            # Remove the cutting mask only if it was generated here (step 2.1/2.2),
            # never a caller-supplied vector_to_cut_input.
            if vector_simple_mask != vector_to_cut_input :
                if os.path.isfile(vector_simple_mask) :
                    removeVectorFile(vector_simple_mask)
            # Remove the temporary directories.
            deleteDir(repertory_mask_temp)
            deleteDir(repertory_samples_cutting_temp)
            deleteDir(repertory_samples_filtering_temp)
            deleteDir(repertory_samples_buff_temp)

    # Update the log with the ending event.
    ending_event = "createMacroSamples() : create macro samples ending : "
    timeLine(path_time_log,ending_event)

    return