def multiBuffersVector(input_file, output_dir, buffer_size, nb_buffers, path_time_log, epsg=2154, format_vector="ESRI Shapefile", project_encoding="UTF-8", overwrite=True):
    """
    Build concentric ring buffers around the features of a vector file and
    fuse them into a single output vector file.

    For each i in [1, nb_buffers] a buffer of radius i*buffer_size is created.
    The first buffer is kept as-is; every subsequent buffer is turned into a
    ring by subtracting the previous buffer. Each feature carries its buffer
    radius in a real-valued "size_buff" field.

    :param input_file: path to the input vector file.
    :param output_dir: directory receiving temporary and final files.
    :param buffer_size: radius increment of each successive buffer.
    :param nb_buffers: number of buffers/rings to generate.
    :param path_time_log: log file updated at start and end.
    :param epsg: EPSG code of the spatial reference (default 2154).
    :param format_vector: OGR driver name (default "ESRI Shapefile").
    :param project_encoding: kept for interface compatibility (unused here).
    :param overwrite: kept for interface compatibility (unused here).
    :return: path of the fused multi-ring output file.
    """
    # Log start
    starting_event = "multiBuffersVector() : Select multi buffers vector starting : "
    timeLine(path_time_log, starting_event)

    # Vector driver configuration
    driver = ogr.GetDriverByName(format_vector)

    # Spatial reference (kept for parity with the original implementation;
    # not referenced directly below)
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(epsg)

    result_list = []
    extension_vector = os.path.splitext(os.path.split(input_file)[1])[1]
    nom_input_file = os.path.splitext(os.path.split(input_file)[1])[0]
    output_repertory = output_dir + os.sep + "multi_ring_buffers_" + nom_input_file + extension_vector

    for i in range(1, nb_buffers + 1):
        # Create the buffer of radius i*buffer_size
        # (reuse nom_input_file instead of recomputing the basename)
        output_temp_buffer = output_dir + os.sep + nom_input_file + "_buffer_" + str(i * buffer_size) + extension_vector
        bufferVector(input_file, output_temp_buffer, i * buffer_size, "", 1.0, 10, format_vector)

        # Add a "size_buff" field holding the buffer radius
        data_source_buffer = driver.Open(output_temp_buffer, 1)
        buffer_layer = data_source_buffer.GetLayer(0)
        buff_size_field = ogr.FieldDefn("size_buff", ogr.OFTReal)
        buffer_layer.CreateField(buff_size_field)
        for feature in buffer_layer:
            feature.SetField("size_buff", i * buffer_size)
            buffer_layer.SetFeature(feature)
        # BUGFIX: release the datasource so pending edits are flushed to disk
        # before the file is re-read by differenceVector/fusionVectors below.
        data_source_buffer.Destroy()

        if i == 1:
            # The first buffer goes straight into the final list
            result_list.append(output_temp_buffer)
        else:
            # Turn the buffer into a ring: difference with the previous buffer
            i_preced = i - 1
            output_temp_buffer_preced = output_dir + os.sep + nom_input_file + "_buffer_" + str(i_preced * buffer_size) + extension_vector
            output_temp_ring = output_dir + os.sep + nom_input_file + "_ring_" + str(i * buffer_size) + extension_vector
            differenceVector(output_temp_buffer_preced, output_temp_buffer, output_temp_ring, format_vector)

            # Fill the buffer-size field of the ring features
            data_source_ring = driver.Open(output_temp_ring, 1)
            ring_layer = data_source_ring.GetLayer(0)
            for ring in ring_layer:
                ring.SetField("size_buff", i * buffer_size)
                ring_layer.SetFeature(ring)
            # BUGFIX: flush edits (same reason as above)
            data_source_ring.Destroy()

            # Add the ring to the final list
            result_list.append(output_temp_ring)

    # Merge the first buffer and all rings into the final output
    fusionVectors(result_list, output_repertory, format_vector)

    # Log end
    ending_event = "multiBuffersVector() : multi buffers vector ending : "
    timeLine(path_time_log, ending_event)
    return output_repertory
def computeQualityIndiceRateQuantity(raster_input, vector_sample_input, repertory_output, base_name, geom, size_grid, pixel_size_x, pixel_size_y, field_value_verif, field_value_other, no_data_value, epsg, format_raster, format_vector, extension_raster, extension_vector, overwrite=True, save_results_intermediate=False):
    """
    Compute quality indicators (rate-quantity list, kappa, overall accuracy)
    for a classification raster against a reference vector, restricted to a
    local study zone described by `geom`.

    Workflow: build a local study polygon from `geom`, cut the reference
    vector to it (building class vs. everything-else class), tag both with a
    "classif" field, rasterize a confusion matrix between the clipped
    classification raster and the fused reference vector, then derive the
    indicators via computeIndicators().

    :param raster_input: classification raster to evaluate.
    :param vector_sample_input: reference (control) vector file.
    :param repertory_output: directory for local working files.
    :param base_name: prefix of all working-file names.
    :param geom: geometry of the local study zone.
    :param size_grid: unused in this function's body (kept in the signature).
    :param pixel_size_x, pixel_size_y: pixel sizes used when cutting the raster.
    :param field_value_verif: class value assigned to the reference features.
    :param field_value_other: class value assigned to the complement area.
    :param no_data_value: no-data value; its column is stripped from the
        original matrix if present.
    :param overwrite: passed through to computeConfusionMatrix().
    :param save_results_intermediate: keep working files when True.
    :return: (class_ref_list, class_pro_list, rate_quantity_list, kappa,
        overall_accuracy, matrix_origine); the first early return delivers the
        initial None/0.0 placeholders when the raster cut fails.
    """
    # Constants
    EXT_TXT = '.txt'
    SUFFIX_STUDY = '_study'
    SUFFIX_CUT = '_cut'
    SUFFIX_BUILD = '_build'
    SUFFIX_OTHER = '_other'
    SUFFIX_LOCAL = '_local'
    SUFFIX_MATRIX = '_matrix'
    FIELD_NAME_CLASSIF = "classif"
    FIELD_TYPE = ogr.OFTInteger

    # Local working-file paths
    vector_local_study = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_STUDY + extension_vector
    vector_local_cut_study = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_STUDY + extension_vector
    vector_local_cut_build = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_BUILD + extension_vector
    vector_local_cut_other = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_OTHER + extension_vector
    vector_local_cut = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + extension_vector
    raster_local_cut = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + extension_raster
    matrix_local_file = repertory_output + os.sep + base_name + SUFFIX_LOCAL + SUFFIX_CUT + SUFFIX_MATRIX + EXT_TXT

    # Result placeholders returned as-is on early exit
    class_ref_list = None
    class_pro_list = None
    rate_quantity_list = None
    matrix_origine = None
    kappa = 0.0
    overall_accuracy = 0.0

    # Clean up any leftover local working files from a previous run
    if os.path.isfile(vector_local_study):
        removeVectorFile(vector_local_study)
    if os.path.isfile(vector_local_cut_study):
        removeVectorFile(vector_local_cut_study)
    if os.path.isfile(vector_local_cut):
        removeVectorFile(vector_local_cut)
    if os.path.isfile(vector_local_cut_build):
        removeVectorFile(vector_local_cut_build)
    if os.path.isfile(vector_local_cut_other):
        removeVectorFile(vector_local_cut_other)
    if os.path.isfile(raster_local_cut):
        removeFile(raster_local_cut)
    if os.path.isfile(matrix_local_file):
        removeFile(matrix_local_file)

    # Create the local study-zone shapefile from the input geometry
    polygon_attr_geom_dico = {"1": [geom, {}]}
    createPolygonsFromGeometryList({}, polygon_attr_geom_dico, vector_local_study, epsg, format_vector)

    # Clip the reference vector to the study zone, then compute its
    # complement ("other") inside the zone
    cutVector(vector_local_study, vector_sample_input, vector_local_cut_build, format_vector)
    differenceVector(vector_local_cut_build, vector_local_study, vector_local_cut_other, format_vector)

    # Tag both layers with their class value in the "classif" field
    addNewFieldVector(vector_local_cut_build, FIELD_NAME_CLASSIF, FIELD_TYPE, field_value_verif, None, None, format_vector)
    addNewFieldVector(vector_local_cut_other, FIELD_NAME_CLASSIF, FIELD_TYPE, field_value_other, None, None, format_vector)
    input_shape_list = [vector_local_cut_build, vector_local_cut_other]
    # NOTE(review): other fusionVectors calls in this file pass format_vector
    # explicitly — presumably it has a default here; confirm the signature.
    fusionVectors(input_shape_list, vector_local_cut)

    # Clip the classification raster to the study zone; abort with the
    # placeholder results if the cut fails
    if not cutImageByVector(vector_local_study, raster_input, raster_local_cut, pixel_size_x, pixel_size_y, no_data_value, 0, format_raster, format_vector):
        return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine

    # Compute the confusion matrix between raster and reference vector
    computeConfusionMatrix(raster_local_cut, vector_local_cut, "", FIELD_NAME_CLASSIF, matrix_local_file, overwrite)

    # Read the confusion matrix back; keep a deep copy of the original
    # because `matrix` may be corrected in place below
    matrix, class_ref_list, class_pro_list = readConfusionMatrix(matrix_local_file)
    matrix_origine = copy.deepcopy(matrix)
    if matrix == []:
        print(
            cyan + "computeQualityIndiceRateQuantity() : " + bold + yellow + "!!! Une erreur c'est produite au cours de la lecture de la matrice de confusion : " + matrix_local_file + ". Voir message d'erreur."
            + endC)
        matrix_origine = None
        return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine

    # Correct the confusion matrix when the control samples and the
    # classification do not expose the same set of (micro)classes
    class_missing_list = []
    if class_ref_list != class_pro_list:
        matrix, class_missing_list = correctMatrix(class_ref_list, class_pro_list, matrix, no_data_value)

    class_count = len(matrix[0]) - len(class_missing_list)

    # Compute the quality indicators (only rate_quantity_list, kappa and
    # overall_accuracy are returned; the rest are discarded)
    precision_list, recall_list, fscore_list, performance_list, rate_false_positive_list, rate_false_negative_list, rate_quantity_list, class_list, overall_accuracy, overall_fscore, overall_performance, kappa = computeIndicators(
        class_count, matrix, class_ref_list, class_missing_list)

    # If a no-data column exists, strip it from the original matrix
    # (in-place mutation of matrix_origine rows) and from class_pro_list
    if str(no_data_value) in class_pro_list:
        pos_col_nodata = class_pro_list.index(str(no_data_value))
        for line in matrix_origine:
            del line[pos_col_nodata]
        class_pro_list.remove(str(no_data_value))

    # Remove local temporary files unless asked to keep them
    if not save_results_intermediate:
        if os.path.isfile(vector_local_study):
            removeVectorFile(vector_local_study)
        if os.path.isfile(vector_local_cut_study):
            removeVectorFile(vector_local_cut_study)
        if os.path.isfile(vector_local_cut):
            removeVectorFile(vector_local_cut)
        if os.path.isfile(vector_local_cut_build):
            removeVectorFile(vector_local_cut_build)
        if os.path.isfile(vector_local_cut_other):
            removeVectorFile(vector_local_cut_other)
        if os.path.isfile(raster_local_cut):
            removeFile(raster_local_cut)
        if os.path.isfile(matrix_local_file):
            removeFile(matrix_local_file)

    return class_ref_list, class_pro_list, rate_quantity_list, kappa, overall_accuracy, matrix_origine
def processTDCfilesSmoothAndFusion(coastline_vectors_input_list, vector_rocky_input, vector_all_output, vector_withrocky_output, generalize_param_method, generalize_param_threshold, name_column_fusion, path_time_log, epsg=2154, format_vector='ESRI Shapefile', extension_vector='.shp', save_results_intermediate=False, overwrite=True):
    """
    Smooth (GRASS generalize) every input coastline vector, fuse the smoothed
    results into one general coastline file, and optionally derive a second
    output with rocky zones removed.

    :param coastline_vectors_input_list: list of input coastline vector files.
    :param vector_rocky_input: rocky-zones vector ("" to skip that step).
    :param vector_all_output: fused general coastline output file.
    :param vector_withrocky_output: output without rocky zones ("" to skip).
    :param generalize_param_method: GRASS v.generalize method.
    :param generalize_param_threshold: GRASS v.generalize threshold.
    :param name_column_fusion: column used to merge geometries to multi-geometries.
    :param path_time_log: log file updated at start and end.
    :param epsg: fallback EPSG code when an input has no projection (default 2154).
    :param format_vector: OGR driver name (default 'ESRI Shapefile').
    :param extension_vector: vector file extension (default '.shp').
    :param save_results_intermediate: keep temporary files when True.
    :param overwrite: when False and the output exists, skip the processing.
    :return: None.
    """
    # Log start
    starting_event = "processTDCfilesSmoothAndFusion() : Create final coastline starting : "
    timeLine(path_time_log, starting_event)

    print(endC)
    print(bold + green + "## START : POST TRAITEMENT TDC" + endC)
    print(endC)

    if debug >= 2:
        print(bold + green + "processTDCfilesSmoothAndFusion() : Variables dans la fonction" + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "coastline_vectors_input_list : " + str(coastline_vectors_input_list) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "vector_rocky_input : " + str(vector_rocky_input) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "vector_all_output : " + str(vector_all_output) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "vector_withrocky_output : " + str(vector_withrocky_output) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "generalize_param_method : " + str(generalize_param_method) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "generalize_param_threshold : " + str(generalize_param_threshold) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "name_column_fusion : " + str(name_column_fusion) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "epsg : " + str(epsg) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "format_vector : " + str(format_vector) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "extension_vector : " + str(extension_vector) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "path_time_log : " + str(path_time_log) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "save_results_intermediate : " + str(save_results_intermediate) + endC)
        print(cyan + "processTDCfilesSmoothAndFusion() : " + endC + "overwrite : " + str(overwrite) + endC)

    # Constants (FIX: the original defined SUFFIX_SMOOTH twice with the same value)
    SUFFIX_SMOOTH = "_smooth"
    SUFFIX_TMP = "_tmp"
    SUFFIX_FUSION = "_fusion"

    repertory_output = os.path.dirname(vector_all_output)
    file_name = os.path.splitext(os.path.basename(vector_all_output))[0]
    vector_fusion = repertory_output + os.sep + file_name + SUFFIX_FUSION + extension_vector
    repertory_temp = repertory_output + os.sep + file_name + SUFFIX_TMP
    if not os.path.exists(repertory_temp):
        os.makedirs(repertory_temp)

    # Check whether the output vector already exists
    check = os.path.isfile(vector_all_output)
    if check and not overwrite:
        # Output already present and overwrite disabled: skip the processing
        print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Vector general coastline already existe : " + str(vector_all_output) + "." + endC)
    else:
        # Try to delete any previous outputs
        try:
            removeVectorFile(vector_all_output)
            removeVectorFile(vector_withrocky_output)
        except Exception:
            # Ignore the exception raised when the file does not exist
            # (and therefore cannot be removed)
            pass

        # Smooth each input vector with GRASS generalize
        param_generalize_dico = {"method": generalize_param_method, "threshold": generalize_param_threshold}
        vectors_temp_output_list = []

        for input_vector in coastline_vectors_input_list:
            vector_name = os.path.splitext(os.path.basename(input_vector))[0]
            output_temp_vector = repertory_temp + os.sep + vector_name + SUFFIX_TMP + extension_vector
            output_smooth_vector = repertory_temp + os.sep + vector_name + SUFFIX_SMOOTH + extension_vector
            vectors_temp_output_list.append(output_temp_vector)

            xmin, xmax, ymin, ymax = getEmpriseFile(input_vector, format_vector)
            projection = getProjection(input_vector, format_vector)
            if projection is None:
                # Fall back to the epsg parameter when the file carries no projection
                projection = epsg

            # Init GRASS
            if debug >= 3:
                print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Initialisation de GRASS " + endC)
            initializeGrass(repertory_temp, xmin, xmax, ymin, ymax, 1, 1, projection)

            # Generalize (smooth) with GRASS, then merge geometries per fusion column
            if debug >= 3:
                print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Applying smooth GRASS for vector : " + str(input_vector) + endC)
            smoothGeomGrass(input_vector, output_smooth_vector, param_generalize_dico, format_vector, overwrite)
            geometries2multigeometries(output_smooth_vector, output_temp_vector, name_column_fusion, format_vector)

            # Close GRASS
            if debug >= 3:
                print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Cloture de GRASS " + endC)
            cleanGrass(repertory_temp)

        if debug >= 3:
            print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Fusion de tous les vecteurs lissés : " + str(vectors_temp_output_list) + endC)

        # Fuse all smoothed temporary vectors
        fusionVectors(vectors_temp_output_list, vector_fusion, format_vector)

        # Drop the "cat" field introduced by GRASS
        deleteFieldsVector(vector_fusion, vector_all_output, ["cat"], format_vector)

        # Re-number the id field incrementally
        updateIndexVector(vector_all_output, "id", format_vector)

    # Remove rocky zones from the general coastline, if requested
    if vector_rocky_input != "" and vector_withrocky_output != "":
        if debug >= 3:
            print("\n" + cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Creation d'un trait de côte generale sans les zones rocheuses : " + str(vector_withrocky_output) + endC)
        # NOTE(review): argument order (..., overwrite, format_vector) differs
        # from the other differenceVector calls in this file, which pass
        # format_vector as the 4th argument — confirm against its signature.
        differenceVector(vector_rocky_input, vector_all_output, vector_withrocky_output, overwrite, format_vector)

    # Remove intermediate files
    if not save_results_intermediate:
        if debug >= 3:
            print(cyan + "processTDCfilesSmoothAndFusion() : " + bold + green + "Suppression des fichiers temporaires " + endC)
        if os.path.exists(repertory_temp):
            shutil.rmtree(repertory_temp)
        removeVectorFile(vector_fusion)

    print(endC)
    print(bold + green + "## END : POST TRAITEMENT TDC" + endC)
    print(endC)

    # Log end
    ending_event = "processTDCfilesSmoothAndFusion() : Create final coastline ending : "
    timeLine(path_time_log, ending_event)
    return