def get_time_series_masks(self, ram=128, logger=LOGGER):
    """Build a stack of dummy masks for user-provided features."""
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import FileSearch_AND

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_mask = os.path.join(time_series_dir,
                                     self.time_series_masks_name)

    # check patterns
    for pattern in self.data_type:
        user_feature = FileSearch_AND(self.tile_directory, True, pattern)
        if not user_feature:
            msg = "WARNING : '{}' not found in {}".format(
                pattern, self.tile_directory)
            logger.error(msg)
            raise Exception(msg)

    nb_patterns = len(self.data_type)
    masks = []
    app_dep = []
    for _ in range(nb_patterns):
        dummy_mask, _ = self.footprint(data_value=0)
        dummy_mask.Execute()
        app_dep.append(dummy_mask)
        masks.append(dummy_mask)

    masks_stack = CreateConcatenateImagesApplication({
        "il": masks,
        "out": times_series_mask,
        "ram": str(ram)
    })
    return masks_stack, app_dep, nb_patterns
def get_time_series_masks(self, ram=128, logger=LOGGER):
    """get time series masks"""
    import os
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication

    preprocessed_dates = self.preprocess(working_dir=None, ram=str(ram))

    if self.write_dates_stack is False:
        nb_available_dates = len(preprocessed_dates)
    else:
        nb_available_dates = len(self.get_available_dates())
    available_masks = self.get_available_dates_masks()

    if nb_available_dates != len(available_masks):
        error = (f"Available dates ({nb_available_dates}) and available "
                 f"masks ({len(available_masks)}) are different")
        logger.error(error)
        raise Exception(error)

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_masks_raster = os.path.join(time_series_dir,
                                             self.time_series_masks_name)
    dates_time_series = CreateConcatenateImagesApplication({
        "il": available_masks,
        "out": times_series_masks_raster,
        "pixType": "int16",
        "ram": str(ram)
    })
    dep = []
    return dates_time_series, dep, len(available_masks)
def get_time_series(self, ram=128):
    """
    TODO : handle a date interval

    Return
    ------
    list
        [(otb_Application, some otb's objects), time_series_labels]
        Functions dealing with OTB application instances have to return
        every object in the pipeline
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.FileUtils import ensure_dir

    # needed to travel through iota2's library
    app_dep = []

    preprocessed_dates = self.preprocess(working_dir=None, ram=str(ram))

    if self.write_dates_stack is False:
        dates_concatenation = []
        for _, dico_date in list(preprocessed_dates.items()):
            for _, reproj_date in list(dico_date["data"].items()):
                dates_concatenation.append(reproj_date)
                reproj_date.Execute()
                app_dep.append(reproj_date)
    else:
        dates_concatenation = self.get_available_dates()

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_raster = os.path.join(time_series_dir,
                                       self.time_series_name)
    dates_time_series = CreateConcatenateImagesApplication({
        "il": dates_concatenation,
        "out": times_series_raster,
        "pixType": "int16",
        "ram": str(ram),
    })
    _, dates_in = self.write_dates_file()

    # build labels
    features_labels = [
        "{}_{}_{}".format(self.__class__.name, band_name, date)
        for date in dates_in for band_name in self.stack_band_position
    ]

    # if not all bands must be used
    if self.extracted_bands:
        app_dep.append(dates_time_series)
        (dates_time_series,
         features_labels) = self.extract_bands_time_series(
             dates_time_series, dates_in, len(self.stack_band_position),
             self.extracted_bands, ram)
    return (dates_time_series, app_dep), features_labels
def get_time_series_masks(self, ram=128):
    """
    TODO : handle a date interval

    Return
    ------
    list
        [(otb_Application, some otb's objects), time_series_labels]
        Functions dealing with OTB application instances have to return
        every object in the pipeline
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.FileUtils import ensure_dir

    # needed to travel through iota2's library
    app_dep = []

    preprocessed_dates = self.preprocess(working_dir=None, ram=str(ram))

    dates_masks = []
    if self.write_dates_stack is False:
        for _, dico_date in list(preprocessed_dates.items()):
            mask_app, mask_app_dep = dico_date["mask"]
            mask_app.Execute()
            dates_masks.append(mask_app)
            app_dep.append(mask_app)
            app_dep.append(mask_app_dep)
    else:
        dates_masks = self.get_available_dates_masks()

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_mask = os.path.join(time_series_dir,
                                     self.time_series_masks_name)
    dates_time_series_mask = CreateConcatenateImagesApplication({
        "il": dates_masks,
        "out": times_series_mask,
        "pixType": "uint8",
        "ram": str(ram)
    })
    return dates_time_series_mask, app_dep, len(dates_masks)
def get_time_series_masks(self, ram=128, logger=LOGGER):
    """
    Due to the SAR data, masks series must be split by polarisation
    and orbit (ascending / descending)
    """
    from iota2.Common.OtbAppBank import getSARstack
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication

    (all_filtered, all_masks, interp_date_files,
     input_date_files) = getSARstack(self.s1_cfg,
                                     self.tile_name,
                                     self.all_tiles.split(" "),
                                     os.path.join(self.i2_output_path,
                                                  "features"),
                                     workingDirectory=None)
    # to be clearer
    s1_masks = OrderedDict()
    nb_avail_masks = 0
    for filtered, masks, _, _ in zip(all_filtered, all_masks,
                                     interp_date_files, input_date_files):
        sar_mode = os.path.basename(
            filtered.GetParameterValue("outputstack"))
        sar_mode = "_".join(os.path.splitext(sar_mode)[0].split("_")[0:-1])
        polarisation = sar_mode.split("_")[1]
        orbit = sar_mode.split("_")[2]

        mask_orbit_pol_name = f"{self.mask_orbit_pol_name}_{orbit}_{polarisation}.tif"
        mask_orbit_pol = os.path.join(self.features_dir, "tmp",
                                      mask_orbit_pol_name)
        masks_app = CreateConcatenateImagesApplication({
            "il": masks,
            "out": mask_orbit_pol,
            "pixType": "uint8" if len(masks) > 255 else "uint16",
            "ram": str(ram)
        })
        s1_masks[sar_mode] = masks_app
        nb_avail_masks += len(masks)
    dependancies = []
    return s1_masks, dependancies, nb_avail_masks
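
# Illustration only (not part of iota2): how the polarisation and the orbit are
# recovered from the SAR stack file name in get_time_series_masks() above. The
# file name used below is a hypothetical example following the assumed
# "<tile>_<polarisation>_<orbit>_<suffix>" pattern.
import os


def parse_sar_mode(stack_file_name):
    """Return (sar_mode, polarisation, orbit) parsed from a SAR stack file name."""
    sar_mode = "_".join(os.path.splitext(stack_file_name)[0].split("_")[0:-1])
    polarisation = sar_mode.split("_")[1]
    orbit = sar_mode.split("_")[2]
    return sar_mode, polarisation, orbit


# parse_sar_mode("T31TCJ_vv_ASC_Filtered.tif") -> ("T31TCJ_vv_ASC", "vv", "ASC")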
def get_features(self, ram=128):
    """get features"""
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import computeUserFeatures
    from iota2.Common.OtbAppBank import CreateIota2FeatureExtractionApplication
    from iota2.Common.FileUtils import ensure_dir

    features_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(features_dir, raise_exe=False)
    features_out = os.path.join(features_dir, self.features_names)

    ((in_stack, in_stack_dep),
     in_stack_features_labels) = self.get_time_series_gapfilling()
    _, dates_enabled = self.write_interpolation_dates_file()

    if not self.enable_gapfilling:
        (in_stack,
         in_stack_dep), in_stack_features_labels = self.get_time_series()
        _, dates_enabled = self.write_dates_file()

    in_stack.Execute()

    app_dep = []
    if self.hand_features_flag:
        hand_features = self.hand_features
        comp = (len(self.stack_band_position)
                if not self.extracted_bands else len(self.extracted_bands))
        (user_date_features, fields_userfeat, user_feat_date,
         stack) = computeUserFeatures(in_stack, dates_enabled, comp,
                                      hand_features.split(","))
        user_date_features.Execute()
        app_dep.append([user_date_features, user_feat_date, stack])

    if self.features:
        bands_avail = self.stack_band_position
        if self.extracted_bands:
            bands_avail = [
                band_name for band_name, _ in self.extracted_bands
            ]
            # check mandatory bands
            if "B4" not in bands_avail:
                raise Exception(
                    "red band (B4) is needed to compute features")
            if "B5" not in bands_avail:
                raise Exception(
                    "nir band (B5) is needed to compute features")
            if "B6" not in bands_avail:
                raise Exception(
                    "swir band (B6) is needed to compute features")
        feat_parameters = {
            "in": in_stack,
            "out": features_out,
            "comp": len(bands_avail),
            "red": bands_avail.index("B4") + 1,
            "nir": bands_avail.index("B5") + 1,
            "swir": bands_avail.index("B6") + 1,
            "copyinput": self.copy_input,
            "relrefl": self.rel_refl,
            "keepduplicates": self.keep_dupl,
            "acorfeat": self.acorfeat,
            "pixType": "int16",
            "ram": str(ram),
        }
        features_app = CreateIota2FeatureExtractionApplication(
            feat_parameters)
        if self.copy_input is False:
            in_stack_features_labels = []
        features_labels = (
            in_stack_features_labels +
            self.get_features_labels(dates_enabled, self.rel_refl,
                                     self.keep_dupl, self.copy_input))
    else:
        features_app = in_stack
        features_labels = in_stack_features_labels

    app_dep.append([in_stack, in_stack_dep])

    if self.hand_features_flag:
        features_app.Execute()
        app_dep.append(features_app)
        features_app = CreateConcatenateImagesApplication({
            "il": [features_app, user_date_features],
            "out": features_out,
            "ram": str(ram),
        })
        features_labels += fields_userfeat
    return (features_app, app_dep), features_labels
def preprocess_date(self, date_dir, out_prepro, working_dir=None, ram=128,
                    logger=LOGGER):
    """Preprocess each date"""
    import os
    import shutil
    from gdal import Warp
    import multiprocessing as mp
    from osgeo.gdalconst import GDT_Byte
    from collections import OrderedDict
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import executeApp

    # manage directories
    date_stack_name = self.build_stack_date_name(date_dir)
    logger.debug(f"preprocessing {date_dir}")
    out_stack = os.path.join(date_dir, date_stack_name)
    if out_prepro:
        _, date_dir_name = os.path.split(date_dir)
        out_dir = os.path.join(out_prepro, date_dir_name)
        if not os.path.exists(out_dir):
            try:
                os.mkdir(out_dir)
            except OSError:
                logger.warning(f"{out_dir} already exists")
        out_stack = os.path.join(out_dir, date_stack_name)

    out_stack_processing = out_stack
    if working_dir:
        out_stack_processing = os.path.join(working_dir, date_stack_name)

    # get bands
    date_bands = [
        FileSearch_AND(date_dir, True,
                       "{}_{}.tif".format(self.data_type, bands_name))[0]
        for bands_name in self.stack_band_position
    ]

    # tile reference image generation
    base_ref = date_bands[0]
    ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
    base_ref_projection = getRasterProjectionEPSG(base_ref)
    if not os.path.exists(self.ref_image):
        logger.info(
            f"reference image generation {self.ref_image} from {base_ref}")
        Warp(self.ref_image,
             base_ref,
             multithread=True,
             format="GTiff",
             xRes=self.native_res,
             yRes=self.native_res,
             outputType=GDT_Byte,
             srcSRS="EPSG:{}".format(base_ref_projection),
             dstSRS="EPSG:{}".format(self.target_proj))

    # reproject / resample
    bands_proj = OrderedDict()
    all_reproj = []
    for band, band_name in zip(date_bands, self.stack_band_position):
        superimp, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": band,
            "ram": str(ram)
        })
        bands_proj[band_name] = superimp
        all_reproj.append(superimp)

    if self.write_dates_stack:
        for reproj in all_reproj:
            reproj.Execute()
        date_stack = CreateConcatenateImagesApplication({
            "il": all_reproj,
            "ram": str(ram),
            "pixType": "int16",
            "out": out_stack_processing
        })
        same_proj = False
        if os.path.exists(out_stack):
            same_proj = int(getRasterProjectionEPSG(out_stack)) == int(
                self.target_proj)
        if not os.path.exists(out_stack) or same_proj is False:
            # ~ date_stack.ExecuteAndWriteOutput()
            multi_proc = mp.Process(target=executeApp, args=[date_stack])
            multi_proc.start()
            multi_proc.join()
            if working_dir:
                shutil.copy(out_stack_processing, out_stack)
                os.remove(out_stack_processing)
    return bands_proj if self.write_dates_stack is False else out_stack
def get_time_series_masks(self, ram=128):
    """get time series masks"""
    import os
    import glob
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_mask = os.path.join(time_series_dir,
                                     self.time_series_masks_name)

    # needed to travel through iota2's library
    app_dep = []

    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # get date's data
    date_data = []

    div_mask_pattern = list(self.masks_rules.keys())[self.border_pos]
    cloud_mask_pattern = list(self.masks_rules.keys())[self.cloud_pos]
    sat_mask_pattern = list(self.masks_rules.keys())[self.sat_pos]
    if self.vhr_path.lower() != "none":
        div_mask_pattern = div_mask_pattern.replace(".TIF", "_COREG.TIF")
        cloud_mask_pattern = cloud_mask_pattern.replace(".TIF", "_COREG.TIF")
        sat_mask_pattern = sat_mask_pattern.replace(".TIF", "_COREG.TIF")

    for date_dir in input_dates:
        div_mask = glob.glob(
            os.path.join(date_dir,
                         f"{self.struct_path_masks}{div_mask_pattern}"))[0]
        cloud_mask = glob.glob(
            os.path.join(
                date_dir,
                f"{self.struct_path_masks}{cloud_mask_pattern}"))[0]
        sat_mask = glob.glob(
            os.path.join(date_dir,
                         f"{self.struct_path_masks}{sat_mask_pattern}"))[0]
        # im1 = div, im2 = cloud, im3 = sat
        div_expr = "(1-(im1b1/2==rint(im1b1/2)))"
        cloud_expr = "im2b1"
        sat_expr = "im3b1"
        # expr = "*".join([div_expr, cloud_expr, sat_expr])
        expr = f"({div_expr} + {cloud_expr} + {sat_expr})==0?0:1"
        date_binary_mask = CreateBandMathApplication({
            "il": [div_mask, cloud_mask, sat_mask],
            "exp": expr
        })
        date_binary_mask.Execute()
        date_data.append(date_binary_mask)
        app_dep.append(date_binary_mask)

    dates_time_series_mask = CreateConcatenateImagesApplication({
        "il": date_data,
        "ram": str(ram),
        "out": times_series_mask
    })

    origin_proj = getRasterProjectionEPSG(sat_mask)
    if int(origin_proj) != int(self.target_proj):
        dates_time_series_mask.Execute()
        app_dep.append(dates_time_series_mask)
        self.generate_raster_ref(sat_mask)
        dates_time_series_mask, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": dates_time_series_mask,
            "interpolator": "nn",
            "out": times_series_mask,
            "ram": str(ram)
        })
    return dates_time_series_mask, app_dep, len(date_data)
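
# Illustration only (not part of iota2): a NumPy sketch of what the per-date
# BandMath expression above computes, assuming div, cloud and sat are 2-D
# integer arrays read from the corresponding mask rasters. A pixel is flagged
# as unusable (1) when its DIV value is odd or when the cloud or saturation
# mask is non-zero; it is clear (0) otherwise.
import numpy as np


def combine_date_masks(div, cloud, sat):
    """Return a uint8 array: 1 = unusable pixel for this date, 0 = clear."""
    # (1-(im1b1/2==rint(im1b1/2))) flags odd DIV values
    div_flag = (div % 2 != 0).astype(np.uint8)
    return ((div_flag + cloud + sat) != 0).astype(np.uint8)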
def get_time_series(self, ram=128):
    """
    TODO : handle a date interval

    Return
    ------
    list
        [(otb_Application, some otb's objects), time_series_labels]
        Functions dealing with OTB application instances have to return
        every object in the pipeline
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import FileSearch_AND

    # needed to travel through iota2's library
    app_dep = []

    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # get date's data
    date_data = []
    for date_dir in input_dates:
        l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                     ".TIF")[0]
        if self.vhr_path.lower() != "none":
            l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                         "COREG", ".TIF")[0]
        date_data.append(l5_old_date)

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_raster = os.path.join(time_series_dir,
                                       self.time_series_name)
    dates_time_series = CreateConcatenateImagesApplication({
        "il": date_data,
        "out": times_series_raster,
        "ram": str(ram)
    })
    _, dates_in = self.write_dates_file()

    # build labels
    features_labels = [
        f"{self.__class__.name}_{band_name}_{date}" for date in dates_in
        for band_name in self.stack_band_position
    ]

    # if not all bands must be used
    if self.extracted_bands:
        app_dep.append(dates_time_series)
        (dates_time_series,
         features_labels) = self.extract_bands_time_series(
             dates_time_series, dates_in, len(self.stack_band_position),
             self.extracted_bands, ram)

    origin_proj = getRasterProjectionEPSG(date_data[0])
    if int(origin_proj) != int(self.target_proj):
        dates_time_series.Execute()
        app_dep.append(dates_time_series)
        self.generate_raster_ref(date_data[0])
        dates_time_series, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": dates_time_series,
            "out": times_series_raster,
            "ram": str(ram)
        })
    return (dates_time_series, app_dep), features_labels
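
# Illustration only (not part of iota2): the flat label list built above, for a
# hypothetical sensor named "Landsat5Old" with two dates and two stacked bands.
# Dates vary in the outer loop and bands in the inner loop, matching the band
# order of the concatenated time series.
dates_in_example = ["20180101", "20180111"]
stack_band_position_example = ["B1", "B2"]
labels_example = [
    f"Landsat5Old_{band_name}_{date}" for date in dates_in_example
    for band_name in stack_band_position_example
]
# -> ['Landsat5Old_B1_20180101', 'Landsat5Old_B2_20180101',
#     'Landsat5Old_B1_20180111', 'Landsat5Old_B2_20180111']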
def preprocess_date(self, date_dir, out_prepro, working_dir=None, ram=128,
                    logger=LOGGER):
    """preprocess date"""
    import os
    import shutil
    from collections import OrderedDict
    from gdal import Warp
    from osgeo.gdalconst import GDT_Byte
    import multiprocessing as mp
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import executeApp

    # manage directories
    date_stack_name = self.build_date_name(date_dir, self.suffix)
    logger.debug(f"preprocessing {date_dir}")
    r10_dir = self.get_date_dir(date_dir, 10)
    out_stack = os.path.join(r10_dir, date_stack_name)
    if out_prepro:
        out_dir = r10_dir.replace(date_dir, out_prepro)
        ensure_dir(out_dir, raise_exe=False)
        out_stack = os.path.join(out_dir, date_stack_name)

    out_stack_processing = out_stack
    if working_dir:
        out_stack_processing = os.path.join(working_dir, date_stack_name)

    # get bands
    date_bands = []
    for band in self.stack_band_position:
        if band in ["B02", "B03", "B04", "B08"]:
            date_bands.append(
                FileSearch_AND(date_dir, True,
                               "{}_".format(self.tile_name),
                               "{}_10m.jp2".format(band))[0])
        elif band in ["B05", "B06", "B07", "B8A", "B11", "B12"]:
            date_bands.append(
                FileSearch_AND(date_dir, True,
                               "{}_".format(self.tile_name),
                               "{}_20m.jp2".format(band))[0])

    # tile reference image generation
    base_ref = date_bands[0]
    logger.info(f"reference image generation {self.ref_image}"
                f" from {base_ref}")
    ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
    base_ref_projection = getRasterProjectionEPSG(base_ref)
    if not os.path.exists(self.ref_image):
        Warp(self.ref_image,
             base_ref,
             multithread=True,
             format="GTiff",
             xRes=10,
             yRes=10,
             outputType=GDT_Byte,
             srcSRS="EPSG:{}".format(base_ref_projection),
             dstSRS="EPSG:{}".format(self.target_proj))

    # reproject / resample
    bands_proj = OrderedDict()
    all_reproj = []
    for band, band_name in zip(date_bands, self.stack_band_position):
        superimp, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": band,
            "ram": str(ram)
        })
        bands_proj[band_name] = superimp
        all_reproj.append(superimp)

    if self.write_dates_stack:
        for reproj in all_reproj:
            reproj.Execute()
        date_stack = CreateConcatenateImagesApplication({
            "il": all_reproj,
            "ram": str(ram),
            "pixType": "int16",
            "out": out_stack_processing
        })
        same_proj = False
        if os.path.exists(out_stack):
            same_proj = int(getRasterProjectionEPSG(out_stack)) == int(
                self.target_proj)
        if not os.path.exists(out_stack) or same_proj is False:
            # date_stack.ExecuteAndWriteOutput()
            multi_proc = mp.Process(target=executeApp, args=[date_stack])
            multi_proc.start()
            multi_proc.join()
            if working_dir:
                shutil.copy(out_stack_processing, out_stack)
                os.remove(out_stack_processing)
    return bands_proj if self.write_dates_stack is False else out_stack
def validity(tile_name, config_path, output_path, maskOut_name,
             view_threshold, workingDirectory=None, RAM=128):
    """
    function dedicated to compute validity raster/vector by tile

    Parameters
    ----------
    tile_name [string]
        tile's name
    config_path [string]
        absolute path to the configuration file
    output_path [string]
        iota2 output path
    maskOut_name [string]
        output vector mask's name
    view_threshold [int]
        threshold
    workingDirectory [string]
        absolute path to a working directory
    RAM [int]
        pipeline's size (Mo)
    """
    import os
    import shutil
    from iota2.Common.ServiceConfigFile import iota2_parameters
    from iota2.Sensors.Sensors_container import sensors_container
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.Utils import run
    from iota2.Common.FileUtils import erodeShapeFile
    from iota2.Common.FileUtils import removeShape
    from iota2.Common.FileUtils import ensure_dir

    features_dir = os.path.join(output_path, "features", tile_name)
    validity_name = "nbView.tif"

    validity_out = os.path.join(features_dir, validity_name)
    validity_processing = validity_out
    if workingDirectory:
        ensure_dir(os.path.join(workingDirectory, tile_name))
        validity_processing = os.path.join(workingDirectory, tile_name,
                                           validity_name)

    running_parameters = iota2_parameters(config_path)
    sensors_parameters = running_parameters.get_sensors_parameters(tile_name)
    remote_sensor_container = sensors_container(tile_name, workingDirectory,
                                                output_path,
                                                **sensors_parameters)
    sensors_time_series_masks = remote_sensor_container.get_sensors_time_series_masks(
        available_ram=RAM)
    sensors_masks_size = []
    sensors_masks = []
    for sensor_name, (time_series_masks, time_series_dep,
                      nb_bands) in sensors_time_series_masks:
        if sensor_name.lower() == "sentinel1":
            for _, time_series_masks_app in list(time_series_masks.items()):
                time_series_masks_app.Execute()
                sensors_masks.append(time_series_masks_app)
        else:
            time_series_masks.Execute()
            sensors_masks.append(time_series_masks)
        sensors_masks_size.append(nb_bands)

    total_dates = sum(sensors_masks_size)
    merge_masks = CreateConcatenateImagesApplication({
        "il": sensors_masks,
        "ram": str(RAM)
    })
    merge_masks.Execute()

    validity_app = CreateBandMathApplication({
        "il": merge_masks,
        "exp": "{}-({})".format(
            total_dates,
            "+".join(["im1b{}".format(i + 1) for i in range(total_dates)])),
        "ram": str(0.7 * RAM),
        "pixType": "uint8" if total_dates < 255 else "uint16",
        "out": validity_processing
    })
    if not os.path.exists(os.path.join(features_dir, validity_name)):
        validity_app.ExecuteAndWriteOutput()
        if workingDirectory:
            shutil.copy(validity_processing,
                        os.path.join(features_dir, validity_name))

    threshold_raster_out = os.path.join(features_dir,
                                        maskOut_name.replace(".shp", ".tif"))
    threshold_vector_out_tmp = os.path.join(
        features_dir, maskOut_name.replace(".shp", "_TMP.shp"))
    threshold_vector_out = os.path.join(features_dir, maskOut_name)

    input_threshold = validity_processing if os.path.exists(
        validity_processing) else validity_out
    threshold_raster = CreateBandMathApplication({
        "il": input_threshold,
        "exp": "im1b1>={}?1:0".format(view_threshold),
        "ram": str(0.7 * RAM),
        "pixType": "uint8",
        "out": threshold_raster_out
    })
    threshold_raster.ExecuteAndWriteOutput()
    cmd_poly = f"gdal_polygonize.py -mask {threshold_raster_out} {threshold_raster_out} -f \"ESRI Shapefile\" {threshold_vector_out_tmp} {os.path.splitext(os.path.basename(threshold_vector_out_tmp))[0]} cloud"
    run(cmd_poly)

    erodeShapeFile(threshold_vector_out_tmp, threshold_vector_out, 0.1)
    os.remove(threshold_raster_out)
    removeShape(threshold_vector_out_tmp.replace(".shp", ""),
                [".prj", ".shp", ".dbf", ".shx"])
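
# Illustration only (not part of iota2): what the "total_dates - (im1b1 + ...)"
# BandMath expression above computes per pixel, assuming mask_stack is a
# (n_dates, rows, cols) NumPy array holding the concatenated binary masks
# (1 = unusable date, 0 = clear date).
import numpy as np


def number_of_clear_views(mask_stack):
    """Return, for each pixel, the number of dates where the pixel is clear."""
    n_dates = mask_stack.shape[0]
    return n_dates - mask_stack.sum(axis=0)


# number_of_clear_views(np.array([[[0, 1]], [[1, 1]], [[0, 0]]])) -> [[2, 1]]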
def get_features(self, ram=128, logger=LOGGER):
    """get sar features"""
    import configparser
    from iota2.Common.FileUtils import getNbDateInTile, FileSearch_AND
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import generateSARFeat_dates
    from iota2.Common.OtbAppBank import getInputParameterOutput

    if self.use_gapfilling:
        (s1_data,
         dependancies), s1_labels = self.get_time_series_gapFilling(ram)
    else:
        (s1_data, dependancies), s1_labels = self.get_time_series(ram)

    config = configparser.ConfigParser()
    config.read(self.s1_cfg)

    sar_features_expr = None
    if config.has_option("Features", "expression"):
        sar_features_expr_cfg = config.get("Features", "expression")
        if "none" not in sar_features_expr_cfg.lower():
            sar_features_expr = sar_features_expr_cfg.split(",")

    dependancies = [dependancies]
    s1_features = []
    sar_time_series = {
        "asc": {
            "vv": {"App": None, "availDates": None},
            "vh": {"App": None, "availDates": None}
        },
        "des": {
            "vv": {"App": None, "availDates": None},
            "vh": {"App": None, "availDates": None}
        }
    }
    for sensor_mode, time_series_app in list(s1_data.items()):
        _, polarisation, orbit = sensor_mode.split("_")
        # inputs
        if self.write_outputs_flag is False:
            time_series_app.Execute()
        else:
            time_series_raster = time_series_app.GetParameterValue(
                getInputParameterOutput(time_series_app))
            if not os.path.exists(time_series_raster):
                time_series_app.ExecuteAndWriteOutput()
            if os.path.exists(time_series_raster):
                time_series_app = time_series_raster

        sar_time_series[orbit.lower()][
            polarisation.lower()]["App"] = time_series_app
        s1_features.append(time_series_app)
        dependancies.append(time_series_app)
        if self.use_gapfilling:
            date_file = FileSearch_AND(
                self.features_dir, True,
                "{}_{}_dates_interpolation.txt".format(
                    polarisation.lower(), orbit.upper()))[0]
        else:
            tar_dir = os.path.join(config.get("Paths", "output"),
                                   self.tile_name[1:])
            date_file = FileSearch_AND(
                tar_dir, True,
                "{}_{}_dates_input.txt".format(polarisation.lower(),
                                               orbit.upper()))[0]
        sar_time_series[orbit.lower()][
            polarisation.lower()]["availDates"] = getNbDateInTile(
                date_file, display=False, raw_dates=True)

    features_labels = []
    for sensor_mode, features in list(s1_labels.items()):
        features_labels += features

    if sar_features_expr:
        sar_user_features_raster = os.path.join(self.features_dir, "tmp",
                                                self.user_sar_features_name)
        user_sar_features, user_sar_features_lab = generateSARFeat_dates(
            sar_features_expr, sar_time_series, sar_user_features_raster)
        if self.write_outputs_flag is False:
            user_sar_features.Execute()
        else:
            if not os.path.exists(sar_user_features_raster):
                user_sar_features.ExecuteAndWriteOutput()
            if os.path.exists(sar_user_features_raster):
                user_sar_features = sar_user_features_raster
        dependancies.append(user_sar_features)
        s1_features.append(user_sar_features)
        features_labels += user_sar_features_lab

    sar_features_raster = os.path.join(self.features_dir, "tmp",
                                       self.sar_features_name)
    sar_features = CreateConcatenateImagesApplication({
        "il": s1_features,
        "out": sar_features_raster,
        "ram": str(ram)
    })
    return (sar_features, dependancies), features_labels
def get_time_series_gapFilling(self, ram=128):
    """
    Due to the SAR data, time series must be split by polarisation
    and orbit (ascending / descending)
    """
    import configparser
    from iota2.Common.FileUtils import getNbDateInTile
    from iota2.Common.OtbAppBank import getSARstack
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateImageTimeSeriesGapFillingApplication
    from iota2.Common.OtbAppBank import getInputParameterOutput

    (all_filtered, all_masks, interp_date_files,
     input_date_files) = getSARstack(self.s1_cfg,
                                     self.tile_name,
                                     self.all_tiles.split(" "),
                                     os.path.join(self.i2_output_path,
                                                  "features"),
                                     workingDirectory=None)
    # to be clearer
    s1_data = OrderedDict()
    s1_labels = OrderedDict()

    config = configparser.ConfigParser()
    config.read(self.s1_cfg)
    interpolation_method = "linear"
    if config.has_option("Processing", "gapFilling_interpolation"):
        interpolation_method = config.get("Processing",
                                          "gapFilling_interpolation")
    dependancies = []

    for filtered, masks, interp_dates, in_dates in zip(
            all_filtered, all_masks, interp_date_files, input_date_files):
        sar_mode = os.path.basename(
            filtered.GetParameterValue("outputstack"))
        sar_mode = "_".join(os.path.splitext(sar_mode)[0].split("_")[0:-1])
        polarisation = sar_mode.split("_")[1]
        orbit = sar_mode.split("_")[2]

        gapfilling_orbit_pol_name_masks = f"{self.gapfilling_orbit_pol_name_mask}_{orbit}_{polarisation}.tif"
        gapfilling_raster_mask = os.path.join(
            self.features_dir, "tmp", gapfilling_orbit_pol_name_masks)
        masks_stack = CreateConcatenateImagesApplication({
            "il": masks,
            "out": gapfilling_raster_mask,
            "ram": str(ram)
        })

        if self.write_outputs_flag is False:
            filtered.Execute()
            masks_stack.Execute()
        else:
            filtered_raster = filtered.GetParameterValue(
                getInputParameterOutput(filtered))
            masks_stack_raster = masks_stack.GetParameterValue(
                getInputParameterOutput(masks_stack))
            if not os.path.exists(masks_stack_raster):
                masks_stack.ExecuteAndWriteOutput()
            if not os.path.exists(filtered_raster):
                filtered.ExecuteAndWriteOutput()
            if os.path.exists(masks_stack_raster):
                masks_stack = masks_stack_raster
            if os.path.exists(filtered_raster):
                filtered = filtered_raster

        dependancies.append((filtered, masks_stack))
        gapfilling_orbit_pol_name = f"{self.gapfilling_orbit_pol_name}_{orbit}_{polarisation}.tif"
        gapfilling_raster = os.path.join(self.features_dir, "tmp",
                                         gapfilling_orbit_pol_name)
        gap_app = CreateImageTimeSeriesGapFillingApplication({
            "in": filtered,
            "mask": masks_stack,
            "it": interpolation_method,
            "id": in_dates,
            "od": interp_dates,
            "comp": str(1),
            "out": gapfilling_raster
        })
        s1_data[sar_mode] = gap_app

        sar_dates = sorted(getNbDateInTile(interp_dates,
                                           display=False,
                                           raw_dates=True),
                           key=lambda x: int(x))
        labels = [
            "{}_{}_{}_{}".format(self.__class__.name, orbit, polarisation,
                                 date).lower() for date in sar_dates
        ]
        s1_labels[sar_mode] = labels
    return (s1_data, dependancies), s1_labels
def generate_fake_l8_old_data(root_directory: str,
                              tile_name: str,
                              dates: List[str],
                              res: Optional[float] = 30.0):
    """
    Parameters
    ----------
    root_directory : string
        path to generate Landsat-8 (old format) dates
    tile_name : string
        THEIA tile name (ex:T31TCJ)
    dates : list
        list of strings representing dates, format : YYYYMMDD
    res : float
        pixel size of the generated rasters
    """
    tile_dir = os.path.join(root_directory, tile_name)
    ensure_dir(tile_dir)

    band_of_interest = ["B1", "B2", "B3", "B4", "B5", "B6", "B7"]
    masks_of_interest = ["DIV", "BINARY_MASK", "NUA", "SAT"]

    origin_x = 566377
    origin_y = 6284029
    array_name = "iota2_binary"
    for date in dates:
        date_dir = os.path.join(
            tile_dir, f"LANDSAT8_OLITIRS_XS_{date}_N2A_{tile_name}")
        mask_date_dir = os.path.join(date_dir, "MASK")
        ensure_dir(date_dir)
        ensure_dir(mask_date_dir)
        all_bands = []
        for cpt, mask in enumerate(masks_of_interest):
            new_mask = os.path.join(mask_date_dir,
                                    (f"LANDSAT8_OLITIRS_XS_{date}_N2A"
                                     f"_{tile_name}_{mask}.TIF"))
            array_to_raster(fun_array(array_name) * cpt % 2,
                            new_mask,
                            pixel_size=res,
                            origin_x=origin_x,
                            origin_y=origin_y)
        for band in band_of_interest:
            new_band = os.path.join(date_dir,
                                    (f"LANDSAT8_OLITIRS_XS_{date}_N2A"
                                     f"_{tile_name}_{band}.TIF"))
            all_bands.append(new_band)
            array = fun_array(array_name)
            random_array = []
            for val in array:
                val_tmp = []
                for pix_val in val:
                    val_tmp.append(pix_val * random.random() * 1000)
                random_array.append(val_tmp)
            array_to_raster(np.array(random_array),
                            new_band,
                            pixel_size=res,
                            origin_x=origin_x,
                            origin_y=origin_y)

        stack_date = os.path.join(date_dir, (f"LANDSAT8_OLITIRS_XS_{date}_"
                                             "N2A_ORTHO_SURF_CORR"
                                             f"_PENTE_{tile_name}.TIF"))
        stack_app = CreateConcatenateImagesApplication({
            "il": all_bands,
            "out": stack_date
        })
        stack_app.ExecuteAndWriteOutput()
def generate_fake_s2_l3a_data(root_directory: str,
                              tile_name: str,
                              dates: List[str],
                              res: Optional[float] = 30.0):
    """
    Parameters
    ----------
    root_directory : string
        path to generate Sentinel-2 L3A dates
    tile_name : string
        THEIA tile name (ex:T31TCJ)
    dates : list
        list of strings representing dates, format : YYYYMMDD
    res : float
        pixel size of the generated rasters
    """
    tile_dir = os.path.join(root_directory, tile_name)
    ensure_dir(tile_dir)

    band_of_interest = [
        "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B8A", "B11", "B12"
    ]
    masks_of_interest = ["BINARY_MASK", "FLG_R1"]

    origin_x = 566377
    origin_y = 6284029
    array_name = "iota2_binary"
    for date in dates:
        date_dir = os.path.join(tile_dir,
                                ("SENTINEL2X_{}-000000-"
                                 "000_L3A_{}_D_V1-7".format(date, tile_name)))
        mask_date_dir = os.path.join(date_dir, "MASKS")
        ensure_dir(date_dir)
        ensure_dir(mask_date_dir)
        all_bands = []
        for cpt, mask in enumerate(masks_of_interest):
            new_mask = os.path.join(
                mask_date_dir,
                ("SENTINEL2X_{}-000000-000_L3A"
                 "_{}_D_V1-7_{}.tif".format(date, tile_name, mask)))
            array_to_raster(fun_array(array_name) * cpt % 2,
                            new_mask,
                            pixel_size=res,
                            origin_x=origin_x,
                            origin_y=origin_y)
        for band in band_of_interest:
            new_band = os.path.join(
                date_dir, ("SENTINEL2X_{}-000000-000_L3A"
                           "_{}_D_V1-7_FRC_{}.tif".format(
                               date, tile_name, band)))
            all_bands.append(new_band)
            array = fun_array(array_name)
            random_array = []
            for val in array:
                val_tmp = []
                for pix_val in val:
                    val_tmp.append(pix_val * random.random() * 1000)
                random_array.append(val_tmp)
            array_to_raster(np.array(random_array),
                            new_band,
                            pixel_size=res,
                            origin_x=origin_x,
                            origin_y=origin_y)

        stack_date = os.path.join(
            date_dir, ("SENTINEL2X_{}-000000-000_L3A_{}_D_V1-7"
                       "_FRC_STACK.tif".format(date, tile_name)))
        stack_app = CreateConcatenateImagesApplication({
            "il": all_bands,
            "out": stack_date
        })
        stack_app.ExecuteAndWriteOutput()
def generateFeatures(pathWd: str, tile: str, sar_optical_post_fusion: bool,
                     output_path: str, sensors_parameters: sensors_params,
                     mode: Optional[str] = "usually"):
    """
    usage : function used to compute features according to a configuration
    file

    Parameters
    ----------
    pathWd : str
        path to a working directory
    tile : str
        tile's name
    sar_optical_post_fusion : bool
        flag used to remove SAR data from features
    output_path : str
        iota2 output path
    sensors_parameters : sensors_params
        sensors parameters
    mode : str
        'usually' / 'SAR' used to get only sar features
    """
    from iota2.Common.OtbAppBank import getInputParameterOutput
    from iota2.Sensors.Sensors_container import sensors_container
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication

    LOGGER.info(f"prepare features for tile : {tile}")
    sensor_tile_container = sensors_container(tile, pathWd, output_path,
                                              **sensors_parameters)

    feat_labels = []
    dep = []
    feat_app = []
    if mode == "usually" and sar_optical_post_fusion is False:
        sensors_features = sensor_tile_container.get_sensors_features(
            available_ram=1000)
        for _, ((sensor_features, sensor_features_dep),
                features_labels) in sensors_features:
            sensor_features.Execute()
            feat_app.append(sensor_features)
            dep.append(sensor_features_dep)
            feat_labels = feat_labels + features_labels
    elif mode == "usually" and sar_optical_post_fusion is True:
        sensor_tile_container.remove_sensor("Sentinel1")
        sensors_features = sensor_tile_container.get_sensors_features(
            available_ram=1000)
        for _, ((sensor_features, sensor_features_dep),
                features_labels) in sensors_features:
            sensor_features.Execute()
            feat_app.append(sensor_features)
            dep.append(sensor_features_dep)
            feat_labels = feat_labels + features_labels
    elif mode == "SAR":
        sensor = sensor_tile_container.get_sensor("Sentinel1")
        (sensor_features,
         sensor_features_dep), feat_labels = sensor.get_features(ram=1000)
        sensor_features.Execute()
        feat_app.append(sensor_features)
        dep.append(sensor_features_dep)

    dep.append(feat_app)

    features_name = "{}_Features.tif".format(tile)
    features_dir = os.path.join(output_path, "features", tile, "tmp")
    features_raster = os.path.join(features_dir, features_name)
    if len(feat_app) > 1:
        all_features = CreateConcatenateImagesApplication({
            "il": feat_app,
            "out": features_raster
        })
    else:
        all_features = sensor_features
        output_param_name = getInputParameterOutput(sensor_features)
        all_features.SetParameterString(output_param_name, features_raster)
    return all_features, feat_labels, dep
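
# Hypothetical usage of generateFeatures(), for illustration only: the
# configuration path, tile name and output directory below are placeholders,
# and the sensors parameters are retrieved the same way as in validity() above.
def example_generate_features_call():
    """Sketch of a generateFeatures() call; all paths are placeholders."""
    from iota2.Common.ServiceConfigFile import iota2_parameters

    sensors_parameters = iota2_parameters(
        "/path/to/config.cfg").get_sensors_parameters("T31TCJ")
    (features_app, feat_labels, dep) = generateFeatures(
        pathWd=None,
        tile="T31TCJ",
        sar_optical_post_fusion=False,
        output_path="/path/to/iota2_output",
        sensors_parameters=sensors_parameters,
        mode="usually")
    return features_app, feat_labels, dep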
def get_features(self, ram=128, logger=LOGGER):
    """generate user features and concatenate all of them"""
    from gdal import Warp
    from osgeo.gdalconst import GDT_Byte
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import getRasterResolution
    from iota2.Common.FileUtils import getRasterNbands

    features_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(features_dir, raise_exe=False)
    features_out = os.path.join(features_dir, self.features_names)

    user_features_found = []
    user_features_bands = []
    for pattern in self.data_type:
        user_feature = FileSearch_AND(self.tile_directory, True, pattern)
        if user_feature:
            user_features_bands.append(getRasterNbands(user_feature[0]))
            user_features_found.append(user_feature[0])
        else:
            msg = "WARNING : '{}' not found in {}".format(
                pattern, self.tile_directory)
            logger.error(msg)
            raise Exception(msg)

    user_feat_stack = CreateConcatenateImagesApplication({
        "il": user_features_found,
        "ram": str(ram),
        "out": features_out
    })
    base_ref = user_features_found[0]
    base_ref_projection = getRasterProjectionEPSG(base_ref)
    if not os.path.exists(self.ref_image):
        base_ref_res_x, _ = getRasterResolution(base_ref)
        Warp(self.ref_image,
             base_ref,
             multithread=True,
             format="GTiff",
             xRes=base_ref_res_x,
             yRes=base_ref_res_x,
             outputType=GDT_Byte,
             srcSRS="EPSG:{}".format(base_ref_projection),
             dstSRS="EPSG:{}".format(self.target_proj))

    app_dep = []
    if int(base_ref_projection) != int(self.target_proj):
        user_feat_stack.Execute()
        app_dep.append(user_feat_stack)
        user_feat_stack, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": user_feat_stack,
            "out": features_out,
            "ram": str(ram)
        })
    features_labels = [
        "{}_band_{}".format(pattern, band_num)
        for pattern, nb_bands in zip(self.data_type, user_features_bands)
        for band_num in range(nb_bands)
    ]
    return (user_feat_stack, app_dep), features_labels
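
# Illustration only (not part of iota2): shape of the feature labels built
# above for two hypothetical user rasters, a single-band "DEM" file and a
# three-band "TEXTURE" file. Band numbering starts at 0 and follows the order
# in which the rasters are concatenated.
data_type_example = ["DEM", "TEXTURE"]
user_features_bands_example = [1, 3]
user_labels_example = [
    "{}_band_{}".format(pattern, band_num)
    for pattern, nb_bands in zip(data_type_example,
                                 user_features_bands_example)
    for band_num in range(nb_bands)
]
# -> ['DEM_band_0', 'TEXTURE_band_0', 'TEXTURE_band_1', 'TEXTURE_band_2']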
def get_features(self, ram=128):
    """get features"""
    import os
    import multiprocessing as mp
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import computeUserFeatures
    from iota2.Common.OtbAppBank import CreateIota2FeatureExtractionApplication
    from iota2.Common.OtbAppBank import getInputParameterOutput
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.OtbAppBank import executeApp

    features_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(features_dir, raise_exe=False)
    features_out = os.path.join(features_dir, self.features_names)

    # ~ features = self.cfg_IOTA2.getParam("GlobChain", "features")
    # ~ enable_gapFilling = self.cfg_IOTA2.getParam("GlobChain",
    # ~                                             "useGapFilling")
    # ~ hand_features_flag = self.cfg_IOTA2.getParam('GlobChain',
    # ~                                              'useAdditionalFeatures')

    # input
    ((in_stack, in_stack_dep),
     in_stack_features_labels) = self.get_time_series_gapfilling()
    _, dates_enabled = self.write_interpolation_dates_file()

    if not self.enable_gapfilling:
        (in_stack,
         in_stack_dep), in_stack_features_labels = self.get_time_series()
        _, dates_enabled = self.write_dates_file()

    if self.write_outputs_flag is False:
        in_stack.Execute()
    else:
        in_stack_raster = in_stack.GetParameterValue(
            getInputParameterOutput(in_stack))
        if not os.path.exists(in_stack_raster):
            # in_stack.ExecuteAndWriteOutput()
            multi_proc = mp.Process(target=executeApp, args=[in_stack])
            multi_proc.start()
            multi_proc.join()
        if os.path.exists(in_stack_raster):
            in_stack = in_stack_raster

    # output
    app_dep = []
    if self.hand_features_flag:
        # ~ hand_features = self.cfg_IOTA2.getParam("Sentinel_2",
        # ~                                         "additionalFeatures")
        comp = len(
            self.stack_band_position) if not self.extracted_bands else len(
                self.extracted_bands)
        (user_date_features, fields_userfeat, user_feat_date,
         stack) = computeUserFeatures(in_stack, dates_enabled, comp,
                                      self.hand_features.split(","))
        user_date_features.Execute()
        app_dep.append([user_date_features, user_feat_date, stack])

    if self.features:
        bands_avail = self.stack_band_position
        if self.extracted_bands:
            bands_avail = [
                band_name for band_name, _ in self.extracted_bands
            ]
            # check mandatory bands
            if "B4" not in bands_avail:
                raise Exception(
                    "red band (B4) is needed to compute features")
            if "B8" not in bands_avail:
                raise Exception(
                    "nir band (B8) is needed to compute features")
            if "B11" not in bands_avail:
                raise Exception(
                    "swir band (B11) is needed to compute features")
        feat_parameters = {
            "in": in_stack,
            "out": features_out,
            "comp": len(bands_avail),
            "red": bands_avail.index("B4") + 1,
            "nir": bands_avail.index("B8") + 1,
            "swir": bands_avail.index("B11") + 1,
            "copyinput": self.copy_input,
            "relrefl": self.rel_refl,
            "keepduplicates": self.keep_dupl,
            "acorfeat": self.acorfeat,
            "pixType": "int16",
            "ram": str(ram)
        }
        features_app = CreateIota2FeatureExtractionApplication(
            feat_parameters)
        if self.copy_input is False:
            in_stack_features_labels = []
        features_labels = (
            in_stack_features_labels +
            self.get_features_labels(dates_enabled, self.rel_refl,
                                     self.keep_dupl, self.copy_input))
    else:
        features_app = in_stack
        features_labels = in_stack_features_labels

    app_dep.append([in_stack, in_stack_dep])

    if self.hand_features_flag:
        features_app.Execute()
        app_dep.append(features_app)
        features_app = CreateConcatenateImagesApplication({
            "il": [features_app, user_date_features],
            "out": features_out,
            "ram": str(ram)
        })
        features_labels += fields_userfeat
    return (features_app, app_dep), features_labels
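
# Illustration only (not part of iota2): how the 1-based "red"/"nir"/"swir"
# indices passed to the feature extraction above are derived from the order of
# the stacked bands, using a hypothetical reduced Sentinel-2 band list.
bands_avail_example = ["B3", "B4", "B8", "B11"]
red_index = bands_avail_example.index("B4") + 1    # -> 2
nir_index = bands_avail_example.index("B8") + 1    # -> 3
swir_index = bands_avail_example.index("B11") + 1  # -> 4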