def validity(tile_name, config_path, output_path, maskOut_name, view_threshold,
             workingDirectory=None, RAM=128):
    """
    function dedicated to compute validity raster/vector by tile

    Builds an in-memory OTB pipeline over every sensor's time-series masks,
    writes a per-pixel "number of views" raster (nbView.tif), thresholds it
    at ``view_threshold`` and polygonizes/erodes the result into the output
    shapefile ``maskOut_name``.

    Parameters
    ----------
    tile_name [string]
        tile's name
    config_path [string]
        absolute path to the configuration file
    output_path [string]
        iota2 output directory (features are written under
        ``<output_path>/features/<tile_name>``)
    maskOut_name [string]
        output vector mask's name (a ``.shp`` file name)
    view_threshold [int]
        threshold (minimum number of clear views for a pixel to be kept)
    workingDirectory [string]
        absolute path to a working directory (optional scratch space;
        note the docstring previously named this ``working_directory``)
    RAM [int]
        pipeline's size (Mo)
    """
    import os
    import shutil
    from iota2.Common.ServiceConfigFile import iota2_parameters
    from iota2.Sensors.Sensors_container import sensors_container
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.Utils import run
    from iota2.Common.FileUtils import erodeShapeFile
    from iota2.Common.FileUtils import removeShape
    from iota2.Common.FileUtils import ensure_dir

    features_dir = os.path.join(output_path, "features", tile_name)
    validity_name = "nbView.tif"

    validity_out = os.path.join(features_dir, validity_name)
    # By default compute directly at the final location; if a working
    # directory is given, compute there and copy the result back later.
    validity_processing = validity_out
    if workingDirectory:
        ensure_dir(os.path.join(workingDirectory, tile_name))
        validity_processing = os.path.join(workingDirectory, tile_name,
                                           validity_name)

    running_parameters = iota2_parameters(config_path)
    sensors_parameters = running_parameters.get_sensors_parameters(tile_name)
    remote_sensor_container = sensors_container(tile_name, workingDirectory,
                                                output_path,
                                                **sensors_parameters)
    sensors_time_series_masks = remote_sensor_container.get_sensors_time_series_masks(
        available_ram=RAM)
    sensors_masks_size = []
    sensors_masks = []
    # Connect every sensor's mask application in-memory (Execute, no file
    # written) so they can feed the concatenation below. Sentinel-1 returns
    # a dict of applications (presumably one per mode/polarisation — TODO
    # confirm), the other sensors a single application.
    for sensor_name, (time_series_masks, time_series_dep,
                      nb_bands) in sensors_time_series_masks:
        if sensor_name.lower() == "sentinel1":
            for _, time_series_masks_app in list(time_series_masks.items()):
                time_series_masks_app.Execute()
                sensors_masks.append(time_series_masks_app)
        else:
            time_series_masks.Execute()
            sensors_masks.append(time_series_masks)
        sensors_masks_size.append(nb_bands)
    total_dates = sum(sensors_masks_size)
    merge_masks = CreateConcatenateImagesApplication({
        "il": sensors_masks,
        "ram": str(RAM)
    })
    merge_masks.Execute()

    # nbView = total_dates - sum(mask bands): assumes each mask band flags
    # unusable pixels with 1 — TODO confirm against the sensors' mask
    # convention. uint16 is used once 255 dates could overflow uint8.
    validity_app = CreateBandMathApplication({
        "il": merge_masks,
        "exp": "{}-({})".format(
            total_dates,
            "+".join(["im1b{}".format(i + 1) for i in range(total_dates)])),
        "ram": str(0.7 * RAM),
        "pixType": "uint8" if total_dates < 255 else "uint16",
        "out": validity_processing
    })
    # Only (re)compute the validity raster if it is not already present in
    # the final features directory.
    if not os.path.exists(os.path.join(features_dir, validity_name)):
        validity_app.ExecuteAndWriteOutput()
        if workingDirectory:
            shutil.copy(validity_processing,
                        os.path.join(features_dir, validity_name))
    threshold_raster_out = os.path.join(features_dir,
                                        maskOut_name.replace(".shp", ".tif"))
    threshold_vector_out_tmp = os.path.join(
        features_dir, maskOut_name.replace(".shp", "_TMP.shp"))
    threshold_vector_out = os.path.join(features_dir, maskOut_name)

    # If the raster was not produced in this run (already existed), fall
    # back to the copy in the features directory.
    input_threshold = validity_processing if os.path.exists(
        validity_processing) else validity_out

    # Binary mask: 1 where the pixel has at least `view_threshold` views.
    threshold_raster = CreateBandMathApplication({
        "il": input_threshold,
        "exp": "im1b1>={}?1:0".format(view_threshold),
        "ram": str(0.7 * RAM),
        "pixType": "uint8",
        "out": threshold_raster_out
    })
    threshold_raster.ExecuteAndWriteOutput()
    # Polygonize using the raster as its own mask so zero-valued pixels are
    # skipped; the layer name is the temporary shapefile's basename and the
    # attribute field is called "cloud".
    cmd_poly = f"gdal_polygonize.py -mask {threshold_raster_out} {threshold_raster_out} -f \"ESRI Shapefile\" {threshold_vector_out_tmp} {os.path.splitext(os.path.basename(threshold_vector_out_tmp))[0]} cloud"
    run(cmd_poly)

    # Erode the polygonized mask (project helper; 0.1 is the erosion
    # parameter — semantics defined in FileUtils.erodeShapeFile), then
    # clean up the intermediate raster and temporary shapefile parts.
    erodeShapeFile(threshold_vector_out_tmp, threshold_vector_out, 0.1)
    os.remove(threshold_raster_out)
    removeShape(threshold_vector_out_tmp.replace(".shp", ""),
                [".prj", ".shp", ".dbf", ".shx"])
def generate(self):
    """
    Run the classification of the tile and write its outputs.

    Produces ``self.classification`` (label raster) and ``self.confidence``
    (confidence map), plus optionally a probability map
    (``self.proba_map_path``). Uses the autoContext classifier when
    ``self.auto_context`` is set, otherwise the standard OTB image
    classifier. If ``self.classif_mask`` is set, every output is zeroed
    where the mask is < 1. When ``self.working_directory`` is set, outputs
    are computed there and copied back to ``self.output_directory``.

    NOTE(review): relies on module-level ``os`` and ``LOGGER`` (not
    imported here) and on instance attributes set by the enclosing class.
    """
    import shutil
    from iota2.Common.OtbAppBank import CreateImageClassifierApplication
    from iota2.Common.OtbAppBank import CreateClassifyAutoContext
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.OtbAppBank import CreateBandMathXApplication
    from iota2.Common.FileUtils import ensure_dir

    # Redirect output paths to the working directory (attributes are
    # rebound in place; the copy-back happens at the end of this method).
    if self.working_directory:
        self.classification = os.path.join(
            self.working_directory,
            os.path.split(self.classification)[-1])
        self.confidence = os.path.join(self.working_directory,
                                       os.path.split(self.confidence)[-1])
    # Standard classifier options; "?&writegeom=false" is an OTB extended
    # filename option appended to the output paths.
    classifier_options = {
        "in": self.features_stack,
        "model": self.classifier_model,
        "confmap": "{}?&writegeom=false".format(self.confidence),
        "ram": str(0.4 * float(self.RAM)),
        "pixType": self.pixType,
        "out": "{}?&writegeom=false".format(self.classification)
    }
    if self.auto_context:
        # autoContext mode replaces the options entirely and needs a
        # dedicated temporary directory (removed at the end of the method).
        tmp_dir = os.path.join(
            self.output_directory,
            "tmp_model_{}_seed_{}_tile_{}".format(self.model_name, self.seed,
                                                  self.tile))
        if self.working_directory:
            tmp_dir = os.path.join(
                self.working_directory,
                "tmp_model_{}_seed_{}_tile_{}".format(
                    self.model_name, self.seed, self.tile))
        ensure_dir(tmp_dir)
        classifier_options = {
            "in": self.features_stack,
            "inseg": self.auto_context["tile_segmentation"],
            "models": self.classifier_model,
            "lablist":
            [str(lab) for lab in self.auto_context["labels_list"]],
            "confmap": "{}?&writegeom=false".format(self.confidence),
            "ram": str(0.4 * float(self.RAM)),
            "pixType": self.pixType,
            "tmpdir": tmp_dir,
            "out": "{}?&writegeom=false".format(self.classification)
        }
    if self.proba_map_path:
        # Collect the union of all class labels across every model/seed so
        # the probability map has one band per possible class.
        all_class = []
        for _, dico_seed in list(self.models_class.items()):
            for _, avail_class in list(dico_seed.items()):
                all_class += avail_class
        all_class = sorted(list(set(all_class)))
        nb_class_run = len(all_class)
        if self.working_directory:
            self.proba_map_path = os.path.join(
                self.working_directory,
                os.path.split(self.proba_map_path)[-1])
        classifier_options["probamap"] = "{}?&writegeom=false".format(
            self.proba_map_path)
        classifier_options["nbclasses"] = str(nb_class_run)
    if self.stats:
        # Optional XML statistics file used to normalize features.
        classifier_options["imstat"] = self.stats
    if self.auto_context:
        classifier = CreateClassifyAutoContext(classifier_options)
    else:
        classifier = CreateImageClassifierApplication(classifier_options)

    LOGGER.info("Compute Classification : {}".format(self.classification))
    classifier.ExecuteAndWriteOutput()
    LOGGER.info("Classification : {} done".format(self.classification))
    if self.classif_mask:
        # Apply the region mask in place to each output: keep the pixel
        # where the mask band is >= 1, zero it elsewhere. NOTE(review):
        # input and output are the same file here — presumably safe with
        # these OTB applications, verify before restructuring.
        mask_filter = CreateBandMathApplication({
            "il": [self.classification, self.classif_mask],
            "ram": str(self.RAM),
            "pixType": self.pixType,
            "out": self.classification,
            "exp": "im2b1>=1?im1b1:0"
        })
        mask_filter.ExecuteAndWriteOutput()
        mask_filter = CreateBandMathApplication({
            "il": [self.confidence, self.classif_mask],
            "ram": str(self.RAM),
            "pixType": "float",
            "out": self.confidence,
            "exp": "im2b1>=1?im1b1:0"
        })
        mask_filter.ExecuteAndWriteOutput()
        if self.proba_map_path:
            # BandMathX works on whole pixels (vectors): outside the mask,
            # replace the probability vector by nb_class_run zeros.
            expr = "im2b1>=1?im1:{}".format("{" +
                                            ";".join(["0"] * nb_class_run) +
                                            "}")
            mask_filter = CreateBandMathXApplication({
                "il": [self.proba_map_path, self.classif_mask],
                "ram": str(self.RAM),
                "pixType": "uint16",
                "out": self.proba_map_path,
                "exp": expr
            })
            mask_filter.ExecuteAndWriteOutput()

    if self.proba_map_path:
        # If this model was trained on fewer classes than the global set,
        # reorder/expand the probability bands to the full class list.
        class_model = self.models_class[self.model_name][int(self.seed)]
        if len(class_model) != len(all_class):
            LOGGER.info("reordering the probability map : '{}'".format(
                self.proba_map_path))
            self.reorder_proba_map(self.proba_map_path, self.proba_map_path,
                                   class_model, all_class)

    # Copy results from the working directory back to the output directory.
    # Only the probability map is removed from the scratch space afterwards.
    if self.working_directory:
        shutil.copy(
            self.classification,
            os.path.join(self.output_directory,
                         os.path.split(self.classification)[-1]))
        #~ os.remove(self.classification)
        shutil.copy(
            self.confidence,
            os.path.join(self.output_directory,
                         os.path.split(self.confidence)[-1]))
        #~ os.remove(self.confidence)
        if self.proba_map_path:
            shutil.copy(
                self.proba_map_path,
                os.path.join(self.output_directory,
                             os.path.split(self.proba_map_path)[-1]))
            os.remove(self.proba_map_path)
    if self.auto_context:
        shutil.rmtree(tmp_dir)