def setUpClass(cls):
    """Prepare the test-group directory and the SAR test resources."""
    # variables shared by every test of the group
    cls.group_test_name = "iota2_test_sensors_test"
    cls.iota2_tests_directory = os.path.join(IOTA2DIR, "data",
                                             cls.group_test_name)
    cls.all_tests_ok = []
    cls.test_working_directory = None

    # always start from an empty test directory
    if os.path.exists(cls.iota2_tests_directory):
        shutil.rmtree(cls.iota2_tests_directory)
    os.mkdir(cls.iota2_tests_directory)

    test_vector_dir = os.path.join(IOTA2DIR, "data", "test_vector")
    cls.ref_sar_config_test = os.path.join(test_vector_dir,
                                           "ConfigurationFile_SAR_Test.cfg")
    cls.large_scale_data = "/work/OT/theia/oso/dataTest/test_LargeScale"
    cls.ref_sar_config = os.path.join(IOTA2DIR, "config", "SARconfig.cfg")
    cls.sar_features_path = os.path.join(
        test_vector_dir, "checkOnlySarFeatures_features_SAR")
    ensure_dir(cls.sar_features_path)

    # SAR inputs all live under the same large-scale sub-directory
    sar_dir = os.path.join(cls.large_scale_data, "SAR_directory")
    cls.sar_data = os.path.join(sar_dir, "raw_data")
    cls.srtm = os.path.join(sar_dir, "SRTM")
    cls.geoid = os.path.join(sar_dir, "egm96.grd")
    cls.s2_large_scale = os.path.join(cls.large_scale_data, "S2_50x50")
    cls.tiles_shape = os.path.join(sar_dir, "Features.shp")
    cls.srtm_shape = os.path.join(sar_dir, "srtm.shp")
    cls.expected_labels = [
        'sentinel1_des_vv_20151231', 'sentinel1_des_vh_20151231'
    ]
def generate_data_tree(directory, mtd_s2st_date, s2st_ext="jp2"):
    """Generate a fake Sen2Cor product tree described by a MTD xml file.

    TODO : replace this function by downloading a Sen2Cor data from PEPS.

    Return
    ------
    products : list
        list of data ready to be generated
    """
    import xml.dom.minidom

    ensure_dir(directory)

    xml_root = xml.dom.minidom.parse(mtd_s2st_date).documentElement
    info_nodes = xml_root.getElementsByTagName("n1:General_Info")
    # the product directory name is the PRODUCT_URI of the xml
    date_dir = info_nodes[0].getElementsByTagName(
        'PRODUCT_URI')[0].childNodes[0].data

    products = []
    for organisation_node in info_nodes[0].getElementsByTagName(
            'Product_Organisation'):
        for img_node in organisation_node.getElementsByTagName("IMAGE_FILE"):
            img_name = img_node.childNodes[0].data
            product = os.path.join(directory, date_dir,
                                   "{}.{}".format(img_name, s2st_ext))
            product_dir, _ = os.path.split(product)
            ensure_dir(product_dir)
            products.append(product)
    return products
def write_interpolation_dates_file(self):
    """Write (once) the interpolation dates file and return its content.

    TODO : mv to base-class

    Return
    ------
    tuple
        (path to the interpolation dates file, list of interpolated dates)
    """
    import os
    from iota2.Common.FileUtils import getDateS2
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import dateInterval

    interp_date_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(interp_date_dir, raise_exe=False)
    interp_date_file = os.path.join(interp_date_dir, self.interpolated_dates)

    # get dates in the whole L8 data-set (getDateS2 -> avail to L8)
    date_interp_min, date_interp_max = getDateS2(self.l8_data,
                                                 self.all_tiles.split(" "))
    # force dates
    if not self.auto_date_flag:
        date_interp_min = self.date_interp_min_user
        date_interp_max = self.date_interp_max_user

    dates = [
        str(interp_date).replace("-", "")
        for interp_date in dateInterval(date_interp_min, date_interp_max,
                                        self.temporal_res)
    ]
    if not os.path.exists(interp_date_file):
        with open(interp_date_file, "w") as dates_file:
            dates_file.write("\n".join(dates))
    return interp_date_file, dates
def generate_fake_user_features_data(root_directory: str, tile_name: str,
                                     patterns: List[str]):
    """Generate one fake user-feature raster per pattern for a tile.

    Parameters
    ----------
    root_directory : string
        path where the tile directory is created
    tile_name : string
        THEIA tile name (ex:T31TCJ)
    patterns : list
        raster names ; one '<pattern>.tif' file is written per entry
    """
    tile_dir = os.path.join(root_directory, tile_name)
    ensure_dir(tile_dir)

    origin_x = 566377
    origin_y = 6284029
    array = fun_array("iota2_binary")
    for pattern in patterns:
        user_features_path = os.path.join(tile_dir, f"{pattern}.tif")
        # randomize the binary array so each raster holds plausible values
        random_array = [[pix_val * random.random() * 1000 for pix_val in row]
                        for row in array]
        array_to_raster(np.array(random_array),
                        user_features_path,
                        origin_x=origin_x,
                        origin_y=origin_y)
def get_time_series_masks(self, ram=128, logger=LOGGER):
    """Build the masks stack associated to the user-feature time series.

    Return
    ------
    tuple
        (concatenation application, applications it depends on,
         number of masks in the stack)
    """
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import FileSearch_AND

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_mask = os.path.join(time_series_dir,
                                     self.time_series_masks_name)

    # every declared pattern must exist in the tile directory
    for pattern in self.data_type:
        if not FileSearch_AND(self.tile_directory, True, pattern):
            msg = "WARNING : '{}' not found in {}".format(
                pattern, self.tile_directory)
            logger.error(msg)
            raise Exception(msg)

    nb_patterns = len(self.data_type)
    masks = []
    app_dep = []
    # user features carry no mask of their own : use a constant footprint
    for _ in range(nb_patterns):
        dummy_mask, _ = self.footprint(data_value=0)
        dummy_mask.Execute()
        app_dep.append(dummy_mask)
        masks.append(dummy_mask)
    masks_stack = CreateConcatenateImagesApplication({
        "il": masks,
        "out": times_series_mask,
        "ram": str(ram)
    })
    return masks_stack, app_dep, nb_patterns
def footprint(self, ram=128):
    """Compute the tile footprint from each date's border mask.

    Return
    ------
    tuple
        (superimpose application, applications it depends on)
    """
    import os
    import glob
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import FileSearch_AND

    footprint_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(footprint_dir, raise_exe=False)
    footprint_out = os.path.join(footprint_dir, self.footprint_name)

    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # get date's footprint : one border mask per date directory
    border_mask_name = list(self.masks_rules.keys())[self.border_pos]
    date_edge = []
    for date_dir in input_dates:
        mask_pattern = os.path.join(
            date_dir, f"{self.struct_path_masks}{border_mask_name}")
        date_edge.append(glob.glob(mask_pattern)[0])

    self.generate_raster_ref(date_edge[0])

    # seek odd values, then sum it
    odd_tests = [
        f"(im{idx+1}b1/2==rint(im{idx+1}b1/2))"
        for idx in range(len(date_edge))
    ]
    expr = f"{'+'.join(odd_tests)}>0?1:0"
    masks_rules = CreateBandMathApplication({
        "il": date_edge,
        "ram": str(ram),
        "exp": expr
    })
    masks_rules.Execute()
    app_dep = [masks_rules]

    reference_raster = self.ref_image
    if self.vhr_path.lower() != "none":
        reference_raster = FileSearch_AND(input_dates[0], True,
                                          self.data_type, "COREG", ".TIF")[0]

    superimp, _ = CreateSuperimposeApplication({
        "inr": reference_raster,
        "inm": masks_rules,
        "out": footprint_out,
        "pixType": "uint8",
        "ram": str(ram)
    })
    return superimp, app_dep
def get_time_series_masks(self, ram=128, logger=LOGGER):
    """Build the stack of time-series masks.

    Parameters
    ----------
    ram : int
        otb applications pipeline size (Mo)
    logger : logging.Logger
        root logger

    Return
    ------
    tuple
        (mask concatenation application, dependencies, number of masks)

    Raises
    ------
    Exception
        if the number of available dates and masks differ
    """
    import os
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication

    preprocessed_dates = self.preprocess(working_dir=None, ram=str(ram))

    if self.write_dates_stack is False:
        nb_available_dates = len(preprocessed_dates)
    else:
        nb_available_dates = len(self.get_available_dates())
    available_masks = self.get_available_dates_masks()

    if nb_available_dates != len(available_masks):
        # fixed wording : was "avaibles  masks"
        error = (f"Available dates ({nb_available_dates}) and available "
                 f"masks ({len(available_masks)}) are different")
        logger.error(error)
        raise Exception(error)

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_masks_raster = os.path.join(time_series_dir,
                                             self.time_series_masks_name)
    dates_time_series = CreateConcatenateImagesApplication({
        "il": available_masks,
        "out": times_series_masks_raster,
        "pixType": "int16",
        "ram": str(ram)
    })
    dep = []
    return dates_time_series, dep, len(available_masks)
def footprint(self, ram=128):
    """Compute the tile footprint ; for L3A the whole tile is kept.

    Return
    ------
    tuple
        (footprint application, applications it depends on)
    """
    import os
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir

    footprint_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(footprint_dir, raise_exe=False)
    footprint_out = os.path.join(footprint_dir, self.footprint_name)

    if self.vhr_path.lower() != "none":
        reference_raster = self.get_available_dates()[0]
    else:
        reference_raster = self.ref_image

    # constant expression : every pixel of the reference belongs to the tile
    s2_l3a_border = CreateBandMathApplication({
        "il": reference_raster,
        "out": footprint_out,
        "exp": "1",
        "pixType": "uint8",
        "ram": str(ram)
    })
    # needed to travel through iota2's library
    app_dep = []
    return s2_l3a_border, app_dep
def setUpClass(cls):
    """Create the test directory tree and the permanent fake S2 data."""
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Tests.UnitTests.tests_utils.tests_utils_rasters import generate_fake_s2_data

    # definition of local variables
    cls.originX = 566377
    cls.originY = 6284029
    cls.group_test_name = "iota_testAutoContext"
    cls.iota2_tests_directory = os.path.join(IOTA2DIR, "data",
                                             cls.group_test_name)
    cls.config_test = os.path.join(
        IOTA2DIR, "config", "Config_4Tuiles_Multi_FUS_Confidence.cfg")
    cls.ref_data = os.path.join(IOTA2DIR, "data", "references",
                                "formatting_vectors", "Input",
                                "formattingVectors", "T31TCJ.shp")
    cls.tile_name = "T31TCJ"
    cls.all_tests_ok = []
    cls.test_working_directory = None

    # always start from an empty test directory
    if os.path.exists(cls.iota2_tests_directory):
        shutil.rmtree(cls.iota2_tests_directory, ignore_errors=True)
    os.mkdir(cls.iota2_tests_directory)

    # generate permanent fake data
    cls.fake_data_dir = os.path.join(cls.iota2_tests_directory, "fake_s2")
    ensure_dir(cls.fake_data_dir)
    generate_fake_s2_data(cls.fake_data_dir, "T31TCJ",
                          ["20190909", "20190919", "20190929"])
def generate_raster_ref(self, base_ref, logger=LOGGER):
    """Generate (once) the tile reference image from a base raster.

    Parameters
    ----------
    base_ref : string
        raster the reference image is warped from
    logger : logging.Logger
        root logger
    """
    import os
    from gdal import Warp
    from osgeo.gdalconst import GDT_Byte
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG

    ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
    base_ref_projection = getRasterProjectionEPSG(base_ref)
    # the reference image is only generated once
    if os.path.exists(self.ref_image):
        return
    logger.info(f"reference image generation {self.ref_image} "
                f"from {base_ref}")
    Warp(self.ref_image,
         base_ref,
         multithread=True,
         format="GTiff",
         xRes=self.native_res,
         yRes=self.native_res,
         outputType=GDT_Byte,
         srcSRS=f"EPSG:{base_ref_projection}",
         dstSRS=f"EPSG:{self.target_proj}")
def get_time_series(self, ram=128):
    """Build the time-series stack application.

    TODO : be able of using a date interval

    Return
    ------
    list
        [(otb_Application, some otb's objects), time_series_labels]
        Functions dealing with otb's application instance has to
        returns every objects in the pipeline
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.FileUtils import ensure_dir

    # needed to travel through iota2's library
    app_dep = []
    preprocessed_dates = self.preprocess(working_dir=None, ram=str(ram))

    if self.write_dates_stack is False:
        # in-memory pipeline : execute each reprojected date now
        dates_concatenation = []
        for dico_date in preprocessed_dates.values():
            for reproj_date in dico_date["data"].values():
                dates_concatenation.append(reproj_date)
                reproj_date.Execute()
                app_dep.append(reproj_date)
    else:
        dates_concatenation = self.get_available_dates()

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_raster = os.path.join(time_series_dir,
                                       self.time_series_name)
    dates_time_series = CreateConcatenateImagesApplication({
        "il": dates_concatenation,
        "out": times_series_raster,
        "pixType": "int16",
        "ram": str(ram),
    })
    _, dates_in = self.write_dates_file()

    # build labels
    features_labels = [
        f"{self.__class__.name}_{band_name}_{date}" for date in dates_in
        for band_name in self.stack_band_position
    ]

    # if not all bands must be used
    if self.extracted_bands:
        app_dep.append(dates_time_series)
        (dates_time_series,
         features_labels) = self.extract_bands_time_series(
             dates_time_series, dates_in, len(self.stack_band_position),
             self.extracted_bands, ram)
    return (dates_time_series, app_dep), features_labels
def test_instance_s1(self):
    """Tests if the class sentinel_1 can be instanciate
    """
    from iota2.Sensors.Sentinel_1 import sentinel_1

    def prepare_sar_config():
        """prepare sentinel_1 configuration file
        """
        # SafeConfigParser is deprecated since Python 3.2 (removed in
        # 3.12) ; ConfigParser is the drop-in replacement
        from configparser import ConfigParser
        parser = ConfigParser()
        parser.read(self.ref_sar_config)
        parser.set('Paths', 'Output', self.sar_features_path)
        parser.set('Paths', 'S1Images', self.sar_data)
        parser.set('Paths', 'SRTM', self.srtm)
        parser.set('Paths', 'GeoidFile', self.geoid)
        parser.set('Processing', 'ReferencesFolder', self.s2_large_scale)
        parser.set('Processing', 'RasterPattern', "STACK.tif")
        parser.set('Processing', 'OutputSpatialResolution', '10')
        parser.set('Processing', 'TilesShapefile', self.tiles_shape)
        parser.set('Processing', 'SRTMShapefile', self.srtm_shape)
        with open(self.ref_sar_config_test, "w+") as config_file:
            parser.write(config_file)

    # constructor arguments of the sentinel_1 sensor
    args = {
        "tile_name": "T31TCJ",
        "target_proj": 2154,
        "all_tiles": "T31TCJ",
        "image_directory": self.ref_sar_config_test,
        "write_dates_stack": False,
        "extract_bands_flag": False,
        "output_target_dir": None,
        "keep_bands": True,
        "i2_output_path": self.test_working_directory,
        "temporal_res": 10,
        "auto_date_flag": True,
        "date_interp_min_user": "",
        "date_interp_max_user": "",
        "write_outputs_flag": False,
        "features": ["NDVI", "NDWI", "Brightness"],
        "enable_gapfilling": True,
        "hand_features_flag": False,
        "hand_features": "",
        "copy_input": True,
        "rel_refl": False,
        "keep_dupl": True,
        "vhr_path": "none",
        "acorfeat": False
    }
    prepare_sar_config()
    ensure_dir(
        os.path.join(self.test_working_directory, "features", "T31TCJ",
                     "tmp"))

    s1_sensor = sentinel_1(**args)
    (sar_features, _), features_labels = s1_sensor.get_features()
    sar_features.ExecuteAndWriteOutput()

    expected_output = sar_features.GetParameterString("out")
    self.assertTrue(os.path.exists(expected_output))
    # assertEqual gives a useful diff on failure (was assertTrue(a == b))
    self.assertEqual(features_labels, self.expected_labels)
def footprint(self, ram=128):
    """Compute the footprint of the time series from the border masks.

    Return
    ------
    tuple
        (superimpose application, applications it depends on)
    """
    import os
    import glob
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir

    footprint_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(footprint_dir, raise_exe=False)
    footprint_out = os.path.join(footprint_dir, self.footprint_name)

    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # get date's footprint : one border mask per date directory
    border_mask = list(self.masks_rules.keys())[self.border_pos]
    date_edge = []
    for date_dir in input_dates:
        date_edge.append(
            glob.glob(
                os.path.join(
                    date_dir,
                    "{}{}".format(self.struct_path_masks, border_mask)))[0])

    # a pixel belongs to the footprint when valid for at least one date
    expr = " || ".join(f"1 - im{idx + 1}b1"
                       for idx in range(len(date_edge)))
    s2_border = CreateBandMathApplication({
        "il": date_edge,
        "exp": expr,
        "ram": str(ram)
    })
    s2_border.Execute()

    reference_raster = self.ref_image
    if self.vhr_path.lower() != "none":
        reference_raster = self.get_available_dates()[0]

    # superimpose footprint
    superimp, superimp_dep = CreateSuperimposeApplication({
        "inr": reference_raster,
        "inm": s2_border,
        "out": footprint_out,
        "pixType": "uint8",
        "ram": str(ram)
    })
    # needed to travel through iota2's library
    app_dep = [s2_border, superimp_dep]
    return superimp, app_dep
def test_iota2_samples_statistics(self):
    """test generation of statistics by tiles
    """
    from iota2.Sampling.SamplesStat import samples_stats
    from iota2.Common.FileUtils import ensure_dir

    ensure_dir(
        os.path.join(self.test_working_directory, "samplesSelection"))
    ensure_dir(os.path.join(self.test_working_directory, "shapeRegion"))

    # region raster used as the tile mask
    raster_ref = os.path.join(self.test_working_directory, "RASTER_REF.tif")
    arr_test = TUR.fun_array("iota2_binary")
    TUR.array_to_raster(arr_test,
                        raster_ref,
                        origin_x=566377,
                        origin_y=6284029)
    shutil.copy(
        raster_ref,
        os.path.join(self.test_working_directory, "shapeRegion",
                     "T31TCJ_region_1_.tif"))

    references_directory = os.path.join(IOTA2DIR, "data", "references")
    region_shape = os.path.join(references_directory, "region_target.shp")
    # copy every member file of the shapefile (geometry, index,
    # attributes, projection) instead of four duplicated copy blocks
    for extension in (".shp", ".shx", ".dbf", ".prj"):
        shutil.copy(
            region_shape.replace(".shp", extension),
            os.path.join(self.test_working_directory, "samplesSelection",
                         "T31TCJ_region_1_seed_0" + extension))

    test_statistics = samples_stats(
        region_seed_tile=("1", "0", "T31TCJ"),
        iota2_directory=self.test_working_directory,
        data_field="region",
        working_directory=None)
    self.assertTrue(
        filecmp.cmp(
            os.path.join(IOTA2DIR, "data", "references",
                         "T31TCJ_region_1_seed_0_stats.xml"),
            test_statistics))
def footprint(self, ram=128, data_value=1):
    """Build a constant footprint over the tile reference image.

    Parameters
    ----------
    ram : int
        otb applications pipeline size (Mo)
    data_value : int
        constant value written everywhere in the footprint raster

    Return
    ------
    tuple
        (footprint application, applications it depends on)
    """
    from gdal import Warp
    from osgeo.gdalconst import GDT_Byte
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import getRasterResolution

    footprint_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(footprint_dir, raise_exe=False)
    footprint_out = os.path.join(footprint_dir, self.footprint_name)

    found_features = FileSearch_AND(self.tile_directory, True,
                                    self.data_type[0])

    # tile reference image generation
    warp_source = found_features[0]
    LOGGER.info(f"reference image generation {self.ref_image} "
                f"from {warp_source}")
    ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
    source_projection = getRasterProjectionEPSG(warp_source)
    source_res_x, _ = getRasterResolution(warp_source)
    if not os.path.exists(self.ref_image):
        Warp(self.ref_image,
             warp_source,
             multithread=True,
             format="GTiff",
             xRes=source_res_x,
             yRes=source_res_x,
             outputType=GDT_Byte,
             srcSRS=f"EPSG:{source_projection}",
             dstSRS=f"EPSG:{self.target_proj}")

    # user features must not contains NODATA
    # -> "exp" : 'data_value' mean every data available
    footprint = CreateBandMathApplication({
        "il": self.ref_image,
        "out": footprint_out,
        "exp": str(data_value),
        "pixType": "uint8",
        "ram": str(ram)
    })
    # needed to travel through iota2's library
    app_dep = []
    return footprint, app_dep
def footprint(self, ram=128):
    """Compute the tile footprint from the SCL scene-classification rasters.

    Return
    ------
    tuple
        (superimpose application, applications it depends on)
    """
    import os
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import FileSearch_AND

    footprint_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(footprint_dir, raise_exe=False)
    footprint_out = os.path.join(footprint_dir, self.footprint_name)

    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # one 20m scene-classification raster per date
    all_scl = []
    for date_dir in input_dates:
        r20m_dir = self.get_date_dir(date_dir, 20)
        all_scl.append(FileSearch_AND(r20m_dir, True, self.scene_classif)[0])

    # a pixel belongs to the footprint when at least one SCL is non-zero
    sum_scl = "+".join("im{}b1".format(band + 1)
                       for band in range(len(all_scl)))
    edge = CreateBandMathApplication({
        "il": all_scl,
        "exp": "{}==0?0:1".format(sum_scl)
    })
    edge.Execute()
    app_dep = [edge]

    # superimpose footprint
    reference_raster = self.ref_image
    if self.vhr_path.lower() != "none":
        reference_raster = self.get_available_dates()[0]
    superimp, superimp_dep = CreateSuperimposeApplication({
        "inr": reference_raster,
        "inm": edge,
        "out": footprint_out,
        "pixType": "uint8",
        "ram": str(ram)
    })
    # needed to travel through iota2's library
    app_dep.append(superimp_dep)
    return superimp, app_dep
def get_time_series_gapfilling(self, ram=128):
    """Build the time-series gapfilling application.

    Return
    ------
    tuple
        ((gapfilling application, dependencies), features labels)
    """
    import os
    from iota2.Common.OtbAppBank import CreateImageTimeSeriesGapFillingApplication
    from iota2.Common.FileUtils import ensure_dir

    gap_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(gap_dir, raise_exe=False)
    gap_out = os.path.join(gap_dir, self.time_series_gapfilling_name)

    dates_interp_file, dates_interp = self.write_interpolation_dates_file()
    dates_in_file, _ = self.write_dates_file()

    masks, masks_dep, _ = self.get_time_series_masks()
    (time_series, time_series_dep), _ = self.get_time_series()
    time_series.Execute()
    masks.Execute()

    # number of components per date in the stack
    if self.extracted_bands:
        comp = len(self.extracted_bands)
    else:
        comp = len(self.stack_band_position)

    gap = CreateImageTimeSeriesGapFillingApplication({
        "in": time_series,
        "mask": masks,
        "comp": str(comp),
        "it": "linear",
        "id": dates_in_file,
        "od": dates_interp_file,
        "out": gap_out,
        "ram": str(ram),
        "pixType": "int16"
    })
    app_dep = [time_series, masks, masks_dep, time_series_dep]

    bands = self.stack_band_position
    if self.extracted_bands:
        bands = [band_name for band_name, _ in self.extracted_bands]
    features_labels = [
        f"{self.__class__.name}_{band_name}_{date}"
        for date in dates_interp for band_name in bands
    ]
    return (gap, app_dep), features_labels
def get_time_series_masks(self, ram=128):
    """Build the masks time-series stack.

    TODO : be able of using a date interval

    Return
    ------
    list
        [(otb_Application, some otb's objects), time_series_labels]
        Functions dealing with otb's application instance has to
        returns every objects in the pipeline
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.FileUtils import ensure_dir

    # needed to travel through iota2's library
    app_dep = []
    preprocessed_dates = self.preprocess(working_dir=None, ram=str(ram))

    dates_masks = []
    if self.write_dates_stack is False:
        # in-memory pipeline : execute each date's mask now
        for dico_date in preprocessed_dates.values():
            mask_app, mask_app_dep = dico_date["mask"]
            mask_app.Execute()
            dates_masks.append(mask_app)
            app_dep.append(mask_app)
            app_dep.append(mask_app_dep)
    else:
        dates_masks = self.get_available_dates_masks()

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_mask = os.path.join(time_series_dir,
                                     self.time_series_masks_name)
    dates_time_series_mask = CreateConcatenateImagesApplication({
        "il": dates_masks,
        "out": times_series_mask,
        "pixType": "uint8",
        "ram": str(ram)
    })
    return dates_time_series_mask, app_dep, len(dates_masks)
def commonMasks(tile_name,
                output_path,
                sensors_parameters,
                working_directory=None,
                RAM=128):
    """
    compute common mask considering all sensors by tile

    Parameters
    ----------
    tile_name [string]
        tile's name
    output_path : str
        iota2 output path
    sensors_parameters : dict
        sensors parameters as built by iota2_parameters
        (was wrongly documented as 'config_path')
    working_directory [string]
        absolute path to a working directory
    RAM [int]
        pipeline's size (Mo)
    """
    import os
    from iota2.Sensors.Sensors_container import sensors_container
    from iota2.Common.Utils import run
    from iota2.Common.FileUtils import ensure_dir

    remote_sensor_container = sensors_container(tile_name, working_directory,
                                                output_path,
                                                **sensors_parameters)
    common_mask, _ = remote_sensor_container.get_common_sensors_footprint(
        available_ram=RAM)
    common_mask_raster = common_mask.GetParameterValue("out")

    if not os.path.exists(common_mask_raster):
        ensure_dir(os.path.split(common_mask_raster)[0], raise_exe=False)
        common_mask.ExecuteAndWriteOutput()

    # vectorize the common mask next to the raster
    common_mask_vector = common_mask_raster.replace(".tif", ".shp")
    common_mask_vector_cmd = "gdal_polygonize.py -f \"ESRI Shapefile\" -mask {} {} {}".format(
        common_mask_raster, common_mask_raster, common_mask_vector)
    run(common_mask_vector_cmd)
def get_time_series(self, ram=128):
    """Build the time-series stack application.

    TODO : be able of using a date interval

    Return
    ------
    list
        [(otb_Application, some otb's objects), time_series_labels]
        Functions dealing with otb's application instance has to
        returns every objects in the pipeline
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import FileSearch_AND

    # needed to travel through iota2's library
    app_dep = []
    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # get date's data
    date_data = []
    for date_dir in input_dates:
        l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                     ".TIF")[0]
        if self.vhr_path.lower() != "none":
            l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                         "COREG", ".TIF")[0]
        date_data.append(l5_old_date)

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_raster = os.path.join(time_series_dir,
                                       self.time_series_name)
    dates_time_series = CreateConcatenateImagesApplication({
        "il": date_data,
        "out": times_series_raster,
        "ram": str(ram)
    })
    _, dates_in = self.write_dates_file()

    # build labels
    features_labels = [
        f"{self.__class__.name}_{band_name}_{date}" for date in dates_in
        for band_name in self.stack_band_position
    ]

    # if not all bands must be used
    if self.extracted_bands:
        app_dep.append(dates_time_series)
        (dates_time_series,
         features_labels) = self.extract_bands_time_series(
             dates_time_series, dates_in, len(self.stack_band_position),
             self.extracted_bands, ram)

    origin_proj = getRasterProjectionEPSG(date_data[0])
    if int(origin_proj) != int(self.target_proj):
        dates_time_series.Execute()
        app_dep.append(dates_time_series)
        self.generate_raster_ref(date_data[0])
        # bug fix : superimpose the executed time-series stack ("inm"),
        # not self.masks_rules (a mask-name mapping) — consistent with
        # every other sensor's reprojection step
        dates_time_series, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": dates_time_series,
            "out": times_series_raster,
            "ram": str(ram)
        })
    return (dates_time_series, app_dep), features_labels
def get_features(self, ram=128, logger=LOGGER):
    """generate user features. Concatenates all of them

    Parameters
    ----------
    ram : int
        otb applications pipeline size (Mo)
    logger : logging.Logger
        root logger

    Return
    ------
    tuple
        ((features stack application, dependencies), features labels)

    Raises
    ------
    Exception
        if a user pattern is not found in the tile directory
    """
    from gdal import Warp
    from osgeo.gdalconst import GDT_Byte
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import getRasterResolution
    from iota2.Common.FileUtils import getRasterNbands

    features_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(features_dir, raise_exe=False)
    features_out = os.path.join(features_dir, self.features_names)

    user_features_found = []
    user_features_bands = []
    for pattern in self.data_type:
        user_feature = FileSearch_AND(self.tile_directory, True, pattern)
        if user_feature:
            user_features_bands.append(getRasterNbands(user_feature[0]))
            user_features_found.append(user_feature[0])
        else:
            msg = "WARNING : '{}' not found in {}".format(
                pattern, self.tile_directory)
            logger.error(msg)
            raise Exception(msg)

    user_feat_stack = CreateConcatenateImagesApplication({
        "il": user_features_found,
        "ram": str(ram),
        "out": features_out
    })

    # tile reference image generation (only once)
    base_ref = user_features_found[0]
    base_ref_projection = getRasterProjectionEPSG(base_ref)
    if not os.path.exists(self.ref_image):
        base_ref_res_x, _ = getRasterResolution(base_ref)
        Warp(self.ref_image,
             base_ref,
             multithread=True,
             format="GTiff",
             xRes=base_ref_res_x,
             yRes=base_ref_res_x,
             outputType=GDT_Byte,
             srcSRS="EPSG:{}".format(base_ref_projection),
             dstSRS="EPSG:{}".format(self.target_proj))

    app_dep = []
    # bug fix : cast both sides to int — previously the right-hand side
    # `(self.target_proj)` was not cast, so an int EPSG code never
    # matched a string target projection and reprojection misfired
    if int(base_ref_projection) != int(self.target_proj):
        user_feat_stack.Execute()
        app_dep.append(user_feat_stack)
        user_feat_stack, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": user_feat_stack,
            "out": features_out,
            "ram": str(ram)
        })
    features_labels = [
        "{}_band_{}".format(pattern, band_num)
        for pattern, nb_bands in zip(self.data_type, user_features_bands)
        for band_num in range(nb_bands)
    ]
    return (user_feat_stack, app_dep), features_labels
def coregister(in_src,
               inref,
               band,
               bandref,
               resample=1,
               step=256,
               minstep=16,
               minsiftpoints=40,
               iterate=1,
               prec=3,
               mode=2,
               datadir=None,
               pattern='*STACK.tif',
               datatype='S2',
               write_features=False,
               working_directory=None):
    """
    register an image / a time series on a reference image

    Parameters
    ----------
    in_src : string
        source raster
    inref : string
        reference raster
    band : int
        band number for the source raster
    bandref : int
        band number for the raster reference raster
    resample : boolean
        resample to reference raster resolution
    step : int
        initial step between the geobins
    minstep : int
        minimal step between the geobins when iterates
    minsiftpoints : int
        minimal number of sift points to perform the registration
    iterate : boolean
        argument to iterate with smaller geobin step to find more sift points
    prec : int
        precision between the source and reference image (in source pixel unit)
    mode : int
        registration mode, 1 : simple registration ; 2 : time series
        registration ; 3 : time series cascade registration (to do)
    datadir : string
        path to the data directory
    pattern : string
        pattern of the STACK files to register
    datatype : string
        sensor type ('S2', 'S2_S2C', 'L5' or 'L8'), drives date parsing
    write_features : boolean
        argument to keep temporary files
    working_directory : string
        optional working directory (defaults to the source raster's folder)

    Note
    ------
    This function use the OTB's application **OrthoRectification** and
    **SuperImpose**, more documentation for
    `OrthoRectification <https://www.orfeo-toolbox.org/Applications/OrthoRectification.html>`_
    and `SuperImpose <https://www.orfeo-toolbox.org/Applications/Superimpose.html>`_
    """
    from iota2.Common.FileUtils import ensure_dir
    path_wd = os.path.dirname(
        in_src) if not working_directory else working_directory
    if not os.path.exists(path_wd):
        ensure_dir(path_wd)
    # clip the source on the reference footprint
    src_clip = os.path.join(path_wd, 'tempSrcClip.tif')
    extract_roi_app = OtbAppBank.CreateExtractROIApplication({
        "in": in_src,
        "mode": "fit",
        "mode.fit.im": inref,
        "out": src_clip,
        "pixType": "uint16"
    })
    extract_roi_app.ExecuteAndWriteOutput()
    # SensorModel generation
    sensor_model = os.path.join(path_wd, 'SensorModel.geom')
    pmcm_app = OtbAppBank.CreatePointMatchCoregistrationModel({
        "in": src_clip,
        "band1": band,
        "inref": inref,
        "bandref": bandref,
        "resample": resample,
        "precision": str(prec),
        "mfilter": "1",
        "backmatching": "1",
        "outgeom": sensor_model,
        "initgeobinstep": str(step),
        "mingeobinstep": str(minstep),
        "minsiftpoints": str(minsiftpoints),
        "iterate": iterate
    })
    pmcm_app.ExecuteAndWriteOutput()
    # mode 1 : application on the source image
    if mode in (1, 3):
        out_src = os.path.join(path_wd, 'temp_file.tif')
        io_src = str(src_clip + '?&skipcarto=true&geom=' + sensor_model)
        dataset = gdal.Open(src_clip)
        prj = dataset.GetProjection()
        geo_trans = dataset.GetGeoTransform()
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
        code = srs.GetAuthorityCode(None)
        # grid spacing : twice the largest pixel dimension
        gsp = str(int(2 * round(max(abs(geo_trans[1]), abs(geo_trans[5])))))
        dataset = None
        ortho_rec_app = OtbAppBank.CreateOrthoRectification({
            "in": io_src,
            "io.out": out_src,
            "map": "epsg",
            "map.epsg.code": code,
            "opt.gridspacing": gsp,
            "pixType": "uint16"
        })
        if write_features:
            ortho_rec_app[0].ExecuteAndWriteOutput()
        else:
            ortho_rec_app[0].Execute()
        ext = os.path.splitext(in_src)[1]
        final_ouput = os.path.join(
            path_wd,
            os.path.basename(in_src.replace(ext, ext.replace('.',
                                                             '_COREG.'))))
        sup_imp_app = OtbAppBank.CreateSuperimposeApplication({
            "inr": src_clip,
            "inm": ortho_rec_app[0],
            "out": final_ouput,
            "pixType": "uint16"
        })
        sup_imp_app[0].ExecuteAndWriteOutput()
        shutil.move(final_ouput,
                    in_src.replace(ext, ext.replace('.', '_COREG.')))
        shutil.move(final_ouput.replace(ext, '.geom'),
                    in_src.replace(ext, '_COREG.geom'))
        # Mask registration if exists
        masks = glob.glob(
            os.path.dirname(in_src) + os.sep + 'MASKS' + os.sep +
            '*BINARY_MASK' + ext)
        if len(masks) != 0:
            for mask in masks:
                src_clip = os.path.join(path_wd, 'tempSrcClip.tif')
                extract_roi_app = OtbAppBank.CreateExtractROIApplication({
                    "in": mask,
                    "mode": "fit",
                    "mode.fit.im": inref,
                    "out": src_clip,
                    "pixType": "uint16"
                })
                extract_roi_app.ExecuteAndWriteOutput()
                out_src = os.path.join(path_wd, 'temp_file.tif')
                io_src = str(mask + '?&skipcarto=true&geom=' + sensor_model)
                ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                    "in": io_src,
                    "io.out": out_src,
                    "map": "epsg",
                    "map.epsg.code": code,
                    "opt.gridspacing": gsp,
                    "pixType": "uint16"
                })
                if write_features:
                    ortho_rec_app[0].ExecuteAndWriteOutput()
                else:
                    ortho_rec_app[0].Execute()
                ext = os.path.splitext(in_src)[1]
                final_mask = os.path.join(
                    path_wd,
                    os.path.basename(
                        mask.replace(ext, ext.replace('.', '_COREG.'))))
                sup_imp_app = OtbAppBank.CreateSuperimposeApplication({
                    "inr": mask,
                    "inm": ortho_rec_app[0],
                    "out": final_mask,
                    "pixType": "uint16"
                })
                sup_imp_app[0].ExecuteAndWriteOutput()
                if final_mask != mask.replace(ext,
                                              ext.replace('.', '_COREG.')):
                    shutil.move(final_mask,
                                mask.replace(ext, ext.replace('.',
                                                              '_COREG.')))
                shutil.move(final_mask.replace(ext, '.geom'),
                            mask.replace(ext, '_COREG.geom'))
        if mode == 3:
            # cascade registration : each date is registered on the
            # previous successfully-registered (clean) date
            folders = glob.glob(os.path.join(datadir, '*'))
            if datatype in ['S2', 'S2_S2C']:
                dates = [
                    os.path.basename(fld).split('_')[1].split("-")[0]
                    for fld in folders
                ]
                ref_date = os.path.basename(in_src).split('_')[1].split(
                    "-")[0]
            elif datatype in ['L5', 'L8']:
                dates = [
                    os.path.basename(fld).split('_')[3] for fld in folders
                ]
                ref_date = os.path.basename(in_src).split('_')[3]
            dates.sort()
            ref_date_ind = dates.index(ref_date)
            bandref = band
            # backward pass : dates older than the reference date
            clean_dates = [ref_date]
            for curr_date in reversed(dates[:ref_date_ind]):
                inref = glob.glob(
                    os.path.join(datadir, '*' + clean_dates[-1] + '*',
                                 pattern))[0]
                insrc = glob.glob(
                    os.path.join(datadir, '*' + curr_date + '*', pattern))[0]
                src_clip = os.path.join(path_wd, 'srcClip.tif')
                extract_roi_app = OtbAppBank.CreateExtractROIApplication({
                    "in": insrc,
                    "mode": "fit",
                    "mode.fit.im": inref,
                    "out": src_clip,
                    "pixType": "uint16"
                })
                extract_roi_app.ExecuteAndWriteOutput()
                out_sensor_model = os.path.join(
                    path_wd, 'SensorModel_%s.geom' % curr_date)
                try:
                    pmcm_app = OtbAppBank.CreatePointMatchCoregistrationModel(
                        {
                            "in": src_clip,
                            "band1": band,
                            "inref": inref,
                            "bandref": bandref,
                            "resample": resample,
                            "precision": str(prec),
                            "mfilter": "1",
                            "backmatching": "1",
                            "outgeom": out_sensor_model,
                            "initgeobinstep": str(step),
                            "mingeobinstep": str(minstep),
                            "minsiftpoints": str(minsiftpoints),
                            "iterate": iterate
                        })
                    pmcm_app.ExecuteAndWriteOutput()
                except RuntimeError:
                    # fall back on the reference sensor model
                    shutil.copy(sensor_model, out_sensor_model)
                    LOGGER.warning(
                        'Coregistration failed, %s will be process with %s' %
                        (insrc, out_sensor_model))
                    continue
                out_src = os.path.join(path_wd, 'temp_file.tif')
                io_src = str(src_clip + '?&skipcarto=true&geom=' +
                             out_sensor_model)
                dataset = gdal.Open(src_clip)
                prj = dataset.GetProjection()
                geo_trans = dataset.GetGeoTransform()
                srs = osr.SpatialReference()
                srs.ImportFromWkt(prj)
                code = srs.GetAuthorityCode(None)
                gsp = str(
                    int(2 *
                        round(max(abs(geo_trans[1]), abs(geo_trans[5])))))
                dataset = None
                try:
                    ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                        "in": io_src,
                        "io.out": out_src,
                        "map": "epsg",
                        "map.epsg.code": code,
                        "opt.gridspacing": gsp,
                        "pixType": "uint16"
                    })
                    if write_features:
                        ortho_rec_app[0].ExecuteAndWriteOutput()
                    else:
                        ortho_rec_app[0].Execute()
                except RuntimeError:
                    os.remove(out_sensor_model)
                    shutil.copy(sensor_model, out_sensor_model)
                    LOGGER.warning(
                        'Coregistration failed, %s will be process with %s' %
                        (insrc, out_sensor_model))
                    ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                        "in": io_src,
                        "io.out": out_src,
                        "map": "epsg",
                        "map.epsg.code": code,
                        "opt.gridspacing": gsp,
                        "pixType": "uint16"
                    })
                    continue
                if write_features:
                    ortho_rec_app[0].ExecuteAndWriteOutput()
                else:
                    ortho_rec_app[0].Execute()
                ext = os.path.splitext(insrc)[1]
                final_ouput = os.path.join(
                    path_wd,
                    os.path.basename(
                        insrc.replace(ext, ext.replace('.', '_COREG.'))))
                sup_imp_app = OtbAppBank.CreateSuperimposeApplication({
                    "inr": src_clip,
                    "inm": ortho_rec_app[0],
                    "out": final_ouput,
                    "pixType": "uint16"
                })
                sup_imp_app[0].ExecuteAndWriteOutput()
                shutil.move(final_ouput,
                            insrc.replace(ext, ext.replace('.', '_COREG.')))
                shutil.move(final_ouput.replace(ext, '.geom'),
                            insrc.replace(ext, '_COREG.geom'))
                # Mask registration if exists
                masks = glob.glob(
                    os.path.dirname(insrc) + os.sep + 'MASKS' + os.sep +
                    '*BINARY_MASK' + ext)
                if len(masks) != 0:
                    for mask in masks:
                        src_clip = os.path.join(path_wd, 'srcClip.tif')
                        extract_roi_app = OtbAppBank.CreateExtractROIApplication(
                            {
                                "in": mask,
                                "mode": "fit",
                                "mode.fit.im": inref,
                                "out": src_clip,
                                "pixType": "uint16"
                            })
                        extract_roi_app.ExecuteAndWriteOutput()
                        out_src = os.path.join(path_wd, 'temp_file.tif')
                        io_src = str(src_clip + '?&skipcarto=true&geom=' +
                                     out_sensor_model)
                        ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                            "in": io_src,
                            "io.out": out_src,
                            "map": "epsg",
                            "map.epsg.code": code,
                            "opt.gridspacing": gsp,
                            "pixType": "uint16"
                        })
                        if write_features:
                            ortho_rec_app[0].ExecuteAndWriteOutput()
                        else:
                            ortho_rec_app[0].Execute()
                        ext = os.path.splitext(insrc)[1]
                        final_mask = os.path.join(
                            path_wd,
                            os.path.basename(
                                mask.replace(ext,
                                             ext.replace('.', '_COREG.'))))
                        sup_imp_app = OtbAppBank.CreateSuperimposeApplication(
                            {
                                "inr": src_clip,
                                "inm": ortho_rec_app[0],
                                "out": final_mask,
                                "pixType": "uint16"
                            })
                        sup_imp_app[0].ExecuteAndWriteOutput()
                        shutil.move(
                            final_mask,
                            mask.replace(ext, ext.replace('.', '_COREG.')))
                        shutil.move(final_mask.replace(ext, '.geom'),
                                    mask.replace(ext, '_COREG.geom'))
                if not write_features and os.path.exists(out_sensor_model):
                    os.remove(out_sensor_model)
                # keep the date in the cascade chain only if clear enough
                if datatype in ['S2', 'S2_S2C']:
                    mtd_file = glob.glob(
                        os.path.join(os.path.dirname(insrc),
                                     '*_MTD_ALL*'))[0]
                    cloud_clear = get_s2_tile_cloud_cover(mtd_file)
                    cover = get_s2_tile_coverage(mtd_file)
                    if cloud_clear > 0.6 and cover > 0.8:
                        clean_dates.append(curr_date)
                elif datatype in ['L5', 'L8']:
                    mlt_file = glob.glob(
                        os.path.join(os.path.dirname(insrc), '*_MTL*'))[0]
                    cloud_clear = get_l8_tile_cloud_cover(mlt_file)
                    if cloud_clear > 0.6:
                        clean_dates.append(curr_date)
            # forward pass : dates newer than the reference date
            clean_dates = [ref_date]
            for curr_date in dates[ref_date_ind + 1:]:
                inref = glob.glob(
                    os.path.join(datadir, '*' + clean_dates[-1] + '*',
                                 pattern))[0]
                insrc = glob.glob(
                    os.path.join(datadir, '*' + curr_date + '*', pattern))[0]
                src_clip = os.path.join(path_wd, 'srcClip.tif')
                extract_roi_app = OtbAppBank.CreateExtractROIApplication({
                    "in": insrc,
                    "mode": "fit",
                    "mode.fit.im": inref,
                    "out": src_clip,
                    "pixType": "uint16"
                })
                extract_roi_app.ExecuteAndWriteOutput()
                out_sensor_model = os.path.join(
                    path_wd, 'SensorModel_%s.geom' % curr_date)
                try:
                    pmcm_app = OtbAppBank.CreatePointMatchCoregistrationModel(
                        {
                            "in": src_clip,
                            "band1": band,
                            "inref": inref,
                            "bandref": bandref,
                            "resample": resample,
                            "precision": str(prec),
                            "mfilter": "1",
                            "backmatching": "1",
                            "outgeom": out_sensor_model,
                            "initgeobinstep": str(step),
                            "mingeobinstep": str(minstep),
                            "minsiftpoints": str(minsiftpoints),
                            "iterate": iterate
                        })
                    pmcm_app.ExecuteAndWriteOutput()
                except RuntimeError:
                    shutil.copy(sensor_model, out_sensor_model)
                    LOGGER.warning(
                        f'Coregistration failed, {insrc} will be process '
                        f'with {out_sensor_model}')
                    continue
                out_src = os.path.join(path_wd, 'temp_file.tif')
                io_src = str(src_clip + '?&skipcarto=true&geom=' +
                             out_sensor_model)
                dataset = gdal.Open(src_clip)
                prj = dataset.GetProjection()
                geo_trans = dataset.GetGeoTransform()
                srs = osr.SpatialReference()
                srs.ImportFromWkt(prj)
                code = srs.GetAuthorityCode(None)
                gsp = str(
                    int(2 *
                        round(max(abs(geo_trans[1]), abs(geo_trans[5])))))
                dataset = None
                try:
                    ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                        "in": io_src,
                        "io.out": out_src,
                        "map": "epsg",
                        "map.epsg.code": code,
                        "opt.gridspacing": gsp,
                        "pixType": "uint16"
                    })
                    if write_features:
                        ortho_rec_app[0].ExecuteAndWriteOutput()
                    else:
                        ortho_rec_app[0].Execute()
                except RuntimeError:
                    os.remove(out_sensor_model)
                    shutil.copy(sensor_model, out_sensor_model)
                    # consistency fix : log the fallback like the backward
                    # pass does (was silent here)
                    LOGGER.warning(
                        'Coregistration failed, %s will be process with %s' %
                        (insrc, out_sensor_model))
                    ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                        "in": io_src,
                        "io.out": out_src,
                        "map": "epsg",
                        "map.epsg.code": code,
                        "opt.gridspacing": gsp,
                        "pixType": "uint16"
                    })
                    continue
                if write_features:
                    ortho_rec_app[0].ExecuteAndWriteOutput()
                else:
                    ortho_rec_app[0].Execute()
                ext = os.path.splitext(insrc)[1]
                final_ouput = os.path.join(
                    path_wd,
                    os.path.basename(
                        insrc.replace(ext, ext.replace('.', '_COREG.'))))
                sup_imp_app = OtbAppBank.CreateSuperimposeApplication({
                    "inr": src_clip,
                    "inm": ortho_rec_app[0],
                    "out": final_ouput,
                    "pixType": "uint16"
                })
                sup_imp_app[0].ExecuteAndWriteOutput()
                shutil.move(final_ouput,
                            insrc.replace(ext, ext.replace('.', '_COREG.')))
                shutil.move(final_ouput.replace(ext, '.geom'),
                            insrc.replace(ext, '_COREG.geom'))
                # Mask registration if exists
                masks = glob.glob(
                    os.path.dirname(insrc) + os.sep + 'MASKS' + os.sep +
                    '*BINARY_MASK' + ext)
                if len(masks) != 0:
                    for mask in masks:
                        src_clip = os.path.join(path_wd, 'srcClip.tif')
                        extract_roi_app = OtbAppBank.CreateExtractROIApplication(
                            {
                                "in": mask,
                                "mode": "fit",
                                "mode.fit.im": inref,
                                "out": src_clip,
                                "pixType": "uint16"
                            })
                        extract_roi_app.ExecuteAndWriteOutput()
                        out_src = os.path.join(path_wd, 'temp_file.tif')
                        io_src = str(src_clip + '?&skipcarto=true&geom=' +
                                     out_sensor_model)
                        ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                            "in": io_src,
                            "io.out": out_src,
                            "map": "epsg",
                            "map.epsg.code": code,
                            "opt.gridspacing": gsp,
                            "pixType": "uint16"
                        })
                        if write_features:
                            ortho_rec_app[0].ExecuteAndWriteOutput()
                        else:
                            ortho_rec_app[0].Execute()
                        ext = os.path.splitext(insrc)[1]
                        final_mask = os.path.join(
                            path_wd,
                            os.path.basename(
                                mask.replace(ext,
                                             ext.replace('.', '_COREG.'))))
                        sup_imp_app = OtbAppBank.CreateSuperimposeApplication(
                            {
                                "inr": src_clip,
                                "inm": ortho_rec_app[0],
                                "out": final_mask,
                                "pixType": "uint16"
                            })
                        sup_imp_app[0].ExecuteAndWriteOutput()
                        shutil.move(
                            final_mask,
                            mask.replace(ext, ext.replace('.', '_COREG.')))
                        shutil.move(final_mask.replace(ext, '.geom'),
                                    mask.replace(ext, '_COREG.geom'))
                if not write_features and os.path.exists(out_sensor_model):
                    os.remove(out_sensor_model)
                if datatype in ['S2', 'S2_S2C']:
                    mtd_file = glob.glob(
                        os.path.join(os.path.dirname(insrc),
                                     '*_MTD_ALL*'))[0]
                    cloud_clear = get_s2_tile_cloud_cover(mtd_file)
                    cover = get_s2_tile_coverage(mtd_file)
                    if cloud_clear > 0.6 and cover > 0.8:
                        clean_dates.append(curr_date)
                elif datatype in ['L5', 'L8']:
                    mlt_file = glob.glob(
                        os.path.join(os.path.dirname(insrc), '*_MTL*'))[0]
                    cloud_clear = get_l8_tile_cloud_cover(mlt_file)
                    if cloud_clear > 0.6:
                        # BUG FIX: was 'clean_dates.append(date)' which
                        # raised NameError ('date' undefined) ; every
                        # sibling branch appends 'curr_date'
                        clean_dates.append(curr_date)
        if not write_features and os.path.exists(sensor_model):
            os.remove(sensor_model)
    # mode 2 : application on the time series
    elif mode == 2:
        ext = os.path.splitext(in_src)[1]
        file_list = glob.glob(datadir + os.sep + '*' + os.sep + pattern)
        for insrc in file_list:
            src_clip = os.path.join(path_wd, 'tempSrcClip.tif')
            extract_roi_app = OtbAppBank.CreateExtractROIApplication({
                "in": insrc,
                "mode": "fit",
                "mode.fit.im": inref,
                "out": src_clip,
                "pixType": "uint16"
            })
            extract_roi_app.ExecuteAndWriteOutput()
            out_src = os.path.join(path_wd, 'temp_file.tif')
            io_src = str(src_clip + '?&skipcarto=true&geom=' + sensor_model)
            dataset = gdal.Open(src_clip)
            prj = dataset.GetProjection()
            geo_trans = dataset.GetGeoTransform()
            srs = osr.SpatialReference()
            srs.ImportFromWkt(prj)
            code = srs.GetAuthorityCode(None)
            gsp = str(
                int(2 * round(max(abs(geo_trans[1]), abs(geo_trans[5])))))
            dataset = None
            ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                "in": io_src,
                "io.out": out_src,
                "map": "epsg",
                "map.epsg.code": code,
                "opt.gridspacing": gsp,
                "pixType": "uint16"
            })
            if write_features:
                ortho_rec_app[0].ExecuteAndWriteOutput()
            else:
                ortho_rec_app[0].Execute()
            ext = os.path.splitext(insrc)[1]
            final_ouput = os.path.join(
                path_wd,
                os.path.basename(
                    insrc.replace(ext, ext.replace('.', '_COREG.'))))
            sup_imp_app = OtbAppBank.CreateSuperimposeApplication({
                "inr": src_clip,
                "inm": ortho_rec_app[0],
                "out": final_ouput,
                "pixType": "uint16"
            })
            sup_imp_app[0].ExecuteAndWriteOutput()
            shutil.move(final_ouput,
                        insrc.replace(ext, ext.replace('.', '_COREG.')))
            shutil.move(final_ouput.replace(ext, '.geom'),
                        insrc.replace(ext, '_COREG.geom'))
            # Mask registration if exists
            masks = glob.glob(
                os.path.dirname(insrc) + os.sep + 'MASKS' + os.sep +
                '*BINARY_MASK*' + ext)
            if len(masks) != 0:
                for mask in masks:
                    src_clip = os.path.join(path_wd, 'tempSrcClip.tif')
                    extract_roi_app = OtbAppBank.CreateExtractROIApplication(
                        {
                            "in": mask,
                            "mode": "fit",
                            "mode.fit.im": inref,
                            "out": src_clip,
                            "pixType": "uint16"
                        })
                    extract_roi_app.ExecuteAndWriteOutput()
                    out_src = os.path.join(path_wd, 'temp_file.tif')
                    io_src = str(src_clip + '?&skipcarto=true&geom=' +
                                 sensor_model)
                    ortho_rec_app = OtbAppBank.CreateOrthoRectification({
                        "in": io_src,
                        "io.out": out_src,
                        "map": "epsg",
                        "map.epsg.code": code,
                        "opt.gridspacing": gsp,
                        "pixType": "uint16"
                    })
                    if write_features:
                        ortho_rec_app[0].ExecuteAndWriteOutput()
                    else:
                        ortho_rec_app[0].Execute()
                    ext = os.path.splitext(insrc)[1]
                    final_mask = os.path.join(
                        path_wd,
                        os.path.basename(
                            mask.replace(ext, ext.replace('.', '_COREG.'))))
                    sup_imp_app = OtbAppBank.CreateSuperimposeApplication({
                        "inr": src_clip,
                        "inm": ortho_rec_app[0],
                        "out": final_mask,
                        "pixType": "uint16"
                    })
                    sup_imp_app[0].ExecuteAndWriteOutput()
                    shutil.move(final_mask,
                                mask.replace(ext, ext.replace('.',
                                                              '_COREG.')))
                    shutil.move(final_mask.replace(ext, '.geom'),
                                mask.replace(ext, '_COREG.geom'))
            os.remove(src_clip)
        if not write_features and os.path.exists(sensor_model):
            os.remove(sensor_model)
def slicSegmentation(tile_name: str,
                     output_path: str,
                     sensors_parameters: sensors_params,
                     ram: Optional[int] = 128,
                     working_dir: Optional[Union[str, None]] = None,
                     force_spw: Optional[Union[int, None]] = None,
                     logger=LOGGER):
    """generate segmentation using SLIC algorithm

    Parameters
    ----------
    tile_name : string
        tile's name
    output_path : string
        iota2 output path
    sensors_parameters : dict
        sensors parameters description
    ram : int
        available ram
    working_dir : string
        directory to store temporary data
    force_spw : int
        force segments' spatial width (defaults to the pixel size)
    logger : logging
        root logger
    """
    import math
    import shutil
    from iota2.Common.GenerateFeatures import generateFeatures
    from iota2.Common.OtbAppBank import CreateSLICApplication
    from iota2.Common.OtbAppBank import getInputParameterOutput
    from iota2.Common.FileUtils import ensure_dir

    SLIC_NAME = "SLIC_{}.tif".format(tile_name)

    # 'dep' must stay referenced : it keeps the upstream OTB applications
    # alive while 'all_features' is in use
    all_features, feat_labels, dep = generateFeatures(
        working_dir,
        tile_name,
        sar_optical_post_fusion=False,
        output_path=output_path,
        sensors_parameters=sensors_parameters,
        mode="usually")
    all_features.Execute()

    spx, _ = all_features.GetImageSpacing(
        getInputParameterOutput(all_features))

    # FIX: removed the dead assignment 'tmp_dir = working_dir' which was
    # unconditionally overwritten by both branches below
    if working_dir is None:
        tmp_dir = os.path.join(output_path, "features", tile_name, "tmp",
                               "SLIC_TMPDIR")
    else:
        tmp_dir = os.path.join(working_dir, tile_name)
    ensure_dir(tmp_dir)

    slic_seg_path = os.path.join(output_path, "features", tile_name, "tmp",
                                 SLIC_NAME)
    features_ram_estimation = all_features.PropagateRequestedRegion(
        key="out", region=all_features.GetImageRequestedRegion("out"))
    # increase estimation...
    features_ram_estimation = features_ram_estimation * 1.5
    # number of tiles (per axis) needed to fit the pipeline in 'ram'
    xy_tiles = math.ceil(
        math.sqrt(float(features_ram_estimation) / (float(ram) * 1024**2)))
    slic_parameters = {
        "in": all_features,
        "tmpdir": tmp_dir,
        "spw": force_spw if force_spw else int(spx),
        "tiling": "manual",
        "tiling.manual.ny": int(xy_tiles),
        "tiling.manual.nx": int(xy_tiles),
        "out": slic_seg_path
    }
    slic_seg = CreateSLICApplication(slic_parameters)

    if not os.path.exists(slic_seg_path):
        logger.info("Processing SLIC segmentation : {}\n\t\t\
with parameters : {}".format(tile_name, slic_parameters))
        slic_seg.ExecuteAndWriteOutput()
    if working_dir is None:
        shutil.rmtree(tmp_dir)
def get_features(self, ram=128):
    """Build the sensor's full feature pipeline (time series + spectral indices).

    Starts from the gap-filled time series (or the raw time series when
    gapfilling is disabled), optionally appends iota2 feature-extraction
    outputs (NDVI-like indices requiring B4/B5/B6) and user "hand" features.

    Parameters
    ----------
    ram : int
        available ram (Mo) forwarded to the OTB applications

    Returns
    -------
    tuple
        ((features application, dependencies), features labels)
    """
    import os
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import computeUserFeatures
    from iota2.Common.OtbAppBank import CreateIota2FeatureExtractionApplication
    from iota2.Common.FileUtils import ensure_dir
    features_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(features_dir, raise_exe=False)
    features_out = os.path.join(features_dir, self.features_names)

    # default to the gap-filled series; overwritten below when gapfilling
    # is disabled (note: the gapfilling pipeline is still constructed —
    # NOTE(review): presumably cheap since nothing is executed yet)
    ((in_stack, in_stack_dep),
     in_stack_features_labels) = self.get_time_series_gapfilling()
    _, dates_enabled = self.write_interpolation_dates_file()

    if not self.enable_gapfilling:
        (in_stack,
         in_stack_dep), in_stack_features_labels = self.get_time_series()
        _, dates_enabled = self.write_dates_file()

    in_stack.Execute()

    app_dep = []
    if self.hand_features_flag:
        hand_features = self.hand_features
        # number of bands per date in the input stack
        comp = (len(self.stack_band_position)
                if not self.extracted_bands else len(self.extracted_bands))
        (user_date_features, fields_userfeat, user_feat_date,
         stack) = computeUserFeatures(in_stack, dates_enabled, comp,
                                      hand_features.split(","))
        user_date_features.Execute()
        app_dep.append([user_date_features, user_feat_date, stack])

    if self.features:
        bands_avail = self.stack_band_position
        if self.extracted_bands:
            bands_avail = [
                band_name for band_name, _ in self.extracted_bands
            ]
            # check mandatory bands
            if "B4" not in bands_avail:
                raise Exception(
                    "red band (B4) is needed to compute features")
            if "B5" not in bands_avail:
                raise Exception(
                    "nir band (B5) is needed to compute features")
            if "B6" not in bands_avail:
                raise Exception(
                    "swir band (B6) is needed to compute features")
        feat_parameters = {
            "in": in_stack,
            "out": features_out,
            "comp": len(bands_avail),
            # band indices are 1-based in OTB
            "red": bands_avail.index("B4") + 1,
            "nir": bands_avail.index("B5") + 1,
            "swir": bands_avail.index("B6") + 1,
            "copyinput": self.copy_input,
            "relrefl": self.rel_refl,
            "keepduplicates": self.keep_dupl,
            "acorfeat": self.acorfeat,
            "pixType": "int16",
            "ram": str(ram),
        }

        features_app = CreateIota2FeatureExtractionApplication(
            feat_parameters)
        if self.copy_input is False:
            # input bands are not forwarded to the output : drop their labels
            in_stack_features_labels = []
        features_labels = (
            in_stack_features_labels +
            self.get_features_labels(dates_enabled, self.rel_refl,
                                     self.keep_dupl, self.copy_input))
    else:
        features_app = in_stack
        features_labels = in_stack_features_labels

    app_dep.append([in_stack, in_stack_dep])

    if self.hand_features_flag:
        features_app.Execute()
        app_dep.append(features_app)
        # concatenate computed features with the user "hand" features
        features_app = CreateConcatenateImagesApplication({
            "il": [features_app, user_date_features],
            "out": features_out,
            "ram": str(ram),
        })
        features_labels += fields_userfeat
    return (features_app, app_dep), features_labels
def validity(tile_name,
             config_path,
             output_path,
             maskOut_name,
             view_threshold,
             workingDirectory=None,
             RAM=128):
    """
    function dedicated to compute validity raster/vector by tile

    The validity raster counts, per pixel, the number of clear acquisitions
    across every sensor's time series masks; it is then thresholded and
    polygonized into the output shapefile.

    Parameters
    ----------
    tile_name [string]
        tile's name
    config_path [string]
        absolute path to the configuration file
    output_path [string]
        iota2 output path
    maskOut_name [string]
        output vector mask's name
    view_threshold [int]
        threshold (minimum number of clear views to keep a pixel)
    workingDirectory [string]
        absolute path to a working directory
    RAM [int]
        pipeline's size (Mo)
    """
    import os
    import shutil
    from iota2.Common.ServiceConfigFile import iota2_parameters
    from iota2.Sensors.Sensors_container import sensors_container
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.Utils import run
    from iota2.Common.FileUtils import erodeShapeFile
    from iota2.Common.FileUtils import removeShape
    from iota2.Common.FileUtils import ensure_dir

    features_dir = os.path.join(output_path, "features", tile_name)
    validity_name = "nbView.tif"

    validity_out = os.path.join(features_dir, validity_name)
    # compute in the working directory when one is provided, then copy back
    validity_processing = validity_out
    if workingDirectory:
        ensure_dir(os.path.join(workingDirectory, tile_name))
        validity_processing = os.path.join(workingDirectory, tile_name,
                                           validity_name)

    running_parameters = iota2_parameters(config_path)
    sensors_parameters = running_parameters.get_sensors_parameters(tile_name)
    remote_sensor_container = sensors_container(tile_name, workingDirectory,
                                                output_path,
                                                **sensors_parameters)
    sensors_time_series_masks = remote_sensor_container.get_sensors_time_series_masks(
        available_ram=RAM)
    sensors_masks_size = []
    sensors_masks = []
    for sensor_name, (time_series_masks, time_series_dep,
                      nb_bands) in sensors_time_series_masks:
        if sensor_name.lower() == "sentinel1":
            # sentinel1 masks come as a dict of applications (one per
            # polarisation/orbit), each must be executed individually
            for _, time_series_masks_app in list(time_series_masks.items()):
                time_series_masks_app.Execute()
                sensors_masks.append(time_series_masks_app)
        else:
            time_series_masks.Execute()
            sensors_masks.append(time_series_masks)
        sensors_masks_size.append(nb_bands)

    total_dates = sum(sensors_masks_size)
    merge_masks = CreateConcatenateImagesApplication({
        "il": sensors_masks,
        "ram": str(RAM)
    })
    merge_masks.Execute()

    # clear views = total dates minus the sum of the mask flags
    # (mask band == 1 means the date is unusable for that pixel)
    validity_app = CreateBandMathApplication({
        "il":
        merge_masks,
        "exp":
        "{}-({})".format(
            total_dates,
            "+".join(["im1b{}".format(i + 1) for i in range(total_dates)])),
        "ram":
        str(0.7 * RAM),
        "pixType":
        "uint8" if total_dates < 255 else "uint16",
        "out":
        validity_processing
    })
    if not os.path.exists(os.path.join(features_dir, validity_name)):
        validity_app.ExecuteAndWriteOutput()
        if workingDirectory:
            shutil.copy(validity_processing,
                        os.path.join(features_dir, validity_name))
    threshold_raster_out = os.path.join(features_dir,
                                        maskOut_name.replace(".shp", ".tif"))
    threshold_vector_out_tmp = os.path.join(
        features_dir, maskOut_name.replace(".shp", "_TMP.shp"))
    threshold_vector_out = os.path.join(features_dir, maskOut_name)

    input_threshold = validity_processing if os.path.exists(
        validity_processing) else validity_out

    # binarize : 1 where enough clear views, 0 elsewhere
    threshold_raster = CreateBandMathApplication({
        "il": input_threshold,
        "exp": "im1b1>={}?1:0".format(view_threshold),
        "ram": str(0.7 * RAM),
        "pixType": "uint8",
        "out": threshold_raster_out
    })
    threshold_raster.ExecuteAndWriteOutput()
    # vectorize the binary mask (the raster is also used as its own mask)
    cmd_poly = f"gdal_polygonize.py -mask {threshold_raster_out} {threshold_raster_out} -f \"ESRI Shapefile\" {threshold_vector_out_tmp} {os.path.splitext(os.path.basename(threshold_vector_out_tmp))[0]} cloud"
    run(cmd_poly)

    # slight erosion to clean polygon edges, then remove temporaries
    erodeShapeFile(threshold_vector_out_tmp, threshold_vector_out, 0.1)
    os.remove(threshold_raster_out)
    removeShape(threshold_vector_out_tmp.replace(".shp", ""),
                [".prj", ".shp", ".dbf", ".shx"])
def get_time_series_gapfilling(self, ram=128):
    """Build the gap-filled time series OTB application.

    Linearly interpolates the raw time series on the interpolation dates
    grid, using the time series masks to flag invalid samples.

    Parameters
    ----------
    ram : int
        available ram (Mo) forwarded to the OTB application

    Returns
    -------
    tuple
        ((gapfilling application, dependencies), features labels)
    """
    import os
    import multiprocessing as mp
    from iota2.Common.OtbAppBank import CreateImageTimeSeriesGapFillingApplication
    from iota2.Common.OtbAppBank import getInputParameterOutput
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.OtbAppBank import executeApp
    gap_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(gap_dir, raise_exe=False)
    gap_out = os.path.join(gap_dir, self.time_series_gapfilling_name)

    dates_interp_file, dates_interp = self.write_interpolation_dates_file()
    dates_in_file, _ = self.write_dates_file()

    masks, masks_dep, _ = self.get_time_series_masks()
    (time_series, time_series_dep), _ = self.get_time_series()

    # inputs
    if self.write_outputs_flag is False:
        # in-memory pipeline : connect the upstream applications directly
        time_series.Execute()
        masks.Execute()
    else:
        # on-disk pipeline : materialize the rasters (in a separate
        # process — NOTE(review): presumably to release OTB memory after
        # each write, confirm) then feed their paths to the gapfilling app
        time_series_raster = time_series.GetParameterValue(
            getInputParameterOutput(time_series))
        masks_raster = masks.GetParameterValue(
            getInputParameterOutput(masks))
        if not os.path.exists(masks_raster):
            multi_proc = mp.Process(target=executeApp, args=[masks])
            multi_proc.start()
            multi_proc.join()
        if not os.path.exists(time_series_raster):
            multi_proc = mp.Process(target=executeApp, args=[time_series])
            multi_proc.start()
            multi_proc.join()
        if os.path.exists(masks_raster):
            masks = masks_raster
        if os.path.exists(time_series_raster):
            time_series = time_series_raster

    # number of bands per date in the stack
    comp = len(
        self.stack_band_position) if not self.extracted_bands else len(
            self.extracted_bands)

    gap = CreateImageTimeSeriesGapFillingApplication({
        "in": time_series,
        "mask": masks,
        "comp": str(comp),
        "it": "linear",
        "id": dates_in_file,
        "od": dates_interp_file,
        "out": gap_out,
        "ram": str(ram),
        "pixType": "int16"
    })
    app_dep = [time_series, masks, masks_dep, time_series_dep]

    bands = self.stack_band_position
    if self.extracted_bands:
        bands = [band_name for band_name, band_pos in self.extracted_bands]

    # one label per (interpolated date, band), date-major order
    features_labels = [
        "{}_{}_{}".format(self.__class__.name, band_name, date)
        for date in dates_interp for band_name in bands
    ]
    return (gap, app_dep), features_labels
def preprocess_date_masks(self,
                          date_dir,
                          out_prepro,
                          working_dir=None,
                          ram=128,
                          logger=LOGGER):
    """Build one binary validity mask for a date from the sensor mask rasters.

    All masks listed in ``self.masks_rules`` are merged (pixel is 1 as soon
    as any input mask is non-zero) and reprojected onto ``self.ref_image``.

    Parameters
    ----------
    date_dir : str
        date directory containing the raw masks
    out_prepro : str
        output directory for preprocessed data (None to write in place)
    working_dir : str
        optional working directory for the raster write
    ram : int
        available ram (Mo) forwarded to the OTB applications
    logger : logging.Logger
        logger

    Returns
    -------
    tuple
        (superimpose application, dependencies list)
    """
    import os
    import shutil
    import glob
    import multiprocessing as mp
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.OtbAppBank import executeApp
    # TODO : throw Exception if no masks are found
    date_mask = []
    for mask_name, _ in list(self.masks_rules.items()):
        date_mask.append(
            glob.glob(
                os.path.join(date_dir,
                             f"{self.struct_path_masks}{mask_name}"))[0])
    # manage directories
    mask_dir = os.path.dirname(date_mask[0])
    logger.debug(f"preprocessing {mask_dir} masks")
    # output name : first mask's name with its suffix swapped for the
    # date-mask suffix
    mask_name = os.path.basename(date_mask[0]).replace(
        list(self.masks_rules.items())[0][0],
        "{}.tif".format(self.masks_date_suffix),
    )
    out_mask = os.path.join(mask_dir, mask_name)
    if out_prepro:
        out_mask_dir = mask_dir.replace(
            os.path.join(self.l8_data, self.tile_name), out_prepro)
        ensure_dir(out_mask_dir, raise_exe=False)
        out_mask = os.path.join(out_mask_dir, mask_name)

    out_mask_processing = out_mask
    if working_dir:
        out_mask_processing = os.path.join(working_dir, mask_name)

    # build binary mask : 0 only where every input mask is 0
    expr = "+".join([f"im{cpt+1}b1" for cpt in range(len(date_mask))])
    expr = f"({expr})==0?0:1"
    binary_mask_rule = CreateBandMathApplication({
        "il": date_mask,
        "exp": expr
    })
    binary_mask_rule.Execute()
    # reproject using reference image
    superimp, _ = CreateSuperimposeApplication({
        "inr": self.ref_image,
        "inm": binary_mask_rule,
        "interpolator": "nn",
        "out": out_mask_processing,
        "pixType": "uint8",
        "ram": str(ram)
    })

    # needed to travel throught iota2's library
    app_dep = [binary_mask_rule]

    if self.write_dates_stack:
        # rewrite the mask when it is missing or when its projection does
        # not match the target one
        same_proj = False
        if os.path.exists(out_mask):
            same_proj = int(getRasterProjectionEPSG(out_mask)) == int(
                self.target_proj)

        if not os.path.exists(out_mask) or same_proj is False:
            # write in a child process (NOTE(review): presumably to free
            # OTB resources after the write — confirm)
            multi_proc = mp.Process(target=executeApp, args=[superimp])
            multi_proc.start()
            multi_proc.join()
            if working_dir:
                shutil.copy(out_mask_processing, out_mask)
                os.remove(out_mask_processing)
    return superimp, app_dep
def preprocess_date(self,
                    date_dir,
                    out_prepro,
                    working_dir=None,
                    ram=128,
                    logger=LOGGER):
    """Preprocess one acquisition date: reproject bands, optionally stack.

    Also generates the tile's reference image (``self.ref_image``) from
    the date's first band if it does not exist yet. Every band listed in
    ``self.stack_band_position`` is superimposed onto the reference
    image; when ``self.write_dates_stack`` is True the reprojected bands
    are concatenated and written to disk.

    Parameters
    ----------
    date_dir : str
        directory containing one date's band rasters
    out_prepro : str
        preprocessing output root; when set, the stack is written under
        ``out_prepro/<date_dir_name>``
    working_dir : str
        optional temporary directory used while writing; the final stack
        is copied back and the temporary file removed
    ram : int
        RAM (MB) allocated to the OTB applications
    logger : logging.Logger
        logger instance

    Return
    ------
    collections.OrderedDict or str
        band name -> superimpose application when
        ``self.write_dates_stack`` is False, else the output stack path
    """
    import os
    import shutil
    from gdal import Warp
    import multiprocessing as mp
    from osgeo.gdalconst import GDT_Byte
    from collections import OrderedDict
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import executeApp

    # manage directories
    date_stack_name = self.build_stack_date_name(date_dir)
    logger.debug(f"preprocessing {date_dir}")
    out_stack = os.path.join(date_dir, date_stack_name)
    if out_prepro:
        _, date_dir_name = os.path.split(date_dir)
        out_dir = os.path.join(out_prepro, date_dir_name)
        if not os.path.exists(out_dir):
            try:
                os.mkdir(out_dir)
            except OSError:
                # concurrent workers may have created it already
                logger.warning(f"{out_dir} already exists")
        out_stack = os.path.join(out_dir, date_stack_name)

    out_stack_processing = out_stack
    if working_dir:
        out_stack_processing = os.path.join(working_dir, date_stack_name)

    # get bands, ordered as in self.stack_band_position
    date_bands = [
        FileSearch_AND(date_dir, True,
                       "{}_{}.tif".format(self.data_type, bands_name))[0]
        for bands_name in self.stack_band_position
    ]

    # tile reference image generation (once per tile, from the first
    # band of the first preprocessed date)
    base_ref = date_bands[0]
    ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
    base_ref_projection = getRasterProjectionEPSG(base_ref)
    if not os.path.exists(self.ref_image):
        logger.info(
            f"reference image generation {self.ref_image} from {base_ref}")
        Warp(self.ref_image,
             base_ref,
             multithread=True,
             format="GTiff",
             xRes=self.native_res,
             yRes=self.native_res,
             outputType=GDT_Byte,
             srcSRS="EPSG:{}".format(base_ref_projection),
             dstSRS="EPSG:{}".format(self.target_proj))

    # reproject / resample every band onto the reference image
    bands_proj = OrderedDict()
    all_reproj = []
    for band, band_name in zip(date_bands, self.stack_band_position):
        superimp, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": band,
            "ram": str(ram)
        })
        bands_proj[band_name] = superimp
        all_reproj.append(superimp)

    if self.write_dates_stack:
        # connect the in-memory pipeline before concatenation
        for reproj in all_reproj:
            reproj.Execute()
        date_stack = CreateConcatenateImagesApplication({
            "il": all_reproj,
            "ram": str(ram),
            "pixType": "int16",
            "out": out_stack_processing
        })
        same_proj = False
        if os.path.exists(out_stack):
            same_proj = int(getRasterProjectionEPSG(out_stack)) == int(
                self.target_proj)
        # (re)write the stack when it is missing or in the wrong
        # projection
        if not os.path.exists(out_stack) or same_proj is False:
            # ~ date_stack.ExecuteAndWriteOutput()
            # run in a child process so OTB fully releases resources
            # after the write
            multi_proc = mp.Process(target=executeApp, args=[date_stack])
            multi_proc.start()
            multi_proc.join()
            if working_dir:
                shutil.copy(out_stack_processing, out_stack)
                os.remove(out_stack_processing)
    return bands_proj if self.write_dates_stack is False else out_stack
def get_time_series_masks(self, ram=128):
    """Build the time-series validity mask stack.

    For every date directory, the border (div), cloud and saturation
    masks are combined into one binary mask (0 = usable pixel,
    1 = masked pixel), then all date masks are concatenated. If the
    masks' native projection differs from ``self.target_proj``, the
    concatenated stack is superimposed onto ``self.ref_image``.

    Bug fix: when a VHR reference is used (``self.vhr_path``), the
    cloud and saturation patterns were previously derived from the
    already-rewritten div pattern (copy-paste), yielding a doubled
    '_COREG_COREG.TIF' suffix and globbing the wrong files. Each
    pattern is now rewritten from its own base.

    Parameters
    ----------
    ram : int
        RAM (MB) allocated to the OTB applications

    Return
    ------
    tuple
        (masks OTB application, dependency applications to keep alive,
        number of dates in the stack)
    """
    import os
    import glob
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateSuperimposeApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.FileUtils import getRasterProjectionEPSG

    time_series_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(time_series_dir, raise_exe=False)
    times_series_mask = os.path.join(time_series_dir,
                                     self.time_series_masks_name)

    # needed to travel throught iota2's library
    app_dep = []

    input_dates = [
        os.path.join(self.tile_directory, cdir)
        for cdir in os.listdir(self.tile_directory)
    ]
    input_dates = self.sort_dates_directories(input_dates)

    # get date's data
    date_data = []

    div_mask_patter = list(self.masks_rules.keys())[self.border_pos]
    cloud_mask_patter = list(self.masks_rules.keys())[self.cloud_pos]
    sat_mask_patter = list(self.masks_rules.keys())[self.sat_pos]
    if self.vhr_path.lower() != "none":
        # coregistered masks carry a "_COREG" suffix; each pattern is
        # rewritten from its OWN base (previously cloud/sat were
        # mistakenly derived from the modified div pattern)
        div_mask_patter = div_mask_patter.replace(".TIF", "_COREG.TIF")
        cloud_mask_patter = cloud_mask_patter.replace(".TIF", "_COREG.TIF")
        sat_mask_patter = sat_mask_patter.replace(".TIF", "_COREG.TIF")

    for date_dir in input_dates:
        div_mask = glob.glob(
            os.path.join(
                date_dir,
                f"{self.struct_path_masks}{div_mask_patter}"))[0]
        cloud_mask = glob.glob(
            os.path.join(
                date_dir,
                f"{self.struct_path_masks}{cloud_mask_patter}"))[0]
        sat_mask = glob.glob(
            os.path.join(
                date_dir,
                f"{self.struct_path_masks}{sat_mask_patter}"))[0]
        # im1 = div, im2 = cloud, im3 = sat
        # even div values mean "usable" (bit 0 unset), hence the
        # rint-based even/odd test
        div_expr = "(1-(im1b1/2==rint(im1b1/2)))"
        cloud_expr = "im2b1"
        sat_expr = "im3b1"
        # expr = "*".join([div_expr, cloud_expr, sat_expr])
        expr = f"({div_expr} + {cloud_expr} + {sat_expr})==0?0:1"
        date_binary_mask = CreateBandMathApplication({
            "il": [div_mask, cloud_mask, sat_mask],
            "exp": expr
        })
        date_binary_mask.Execute()
        date_data.append(date_binary_mask)
        app_dep.append(date_binary_mask)

    dates_time_series_mask = CreateConcatenateImagesApplication({
        "il": date_data,
        "ram": str(ram),
        "out": times_series_mask
    })

    # NOTE: projection is checked on the LAST date's sat mask; all
    # dates of a tile are assumed to share one projection
    origin_proj = getRasterProjectionEPSG(sat_mask)
    if int(origin_proj) != int(self.target_proj):
        dates_time_series_mask.Execute()
        app_dep.append(dates_time_series_mask)
        self.generate_raster_ref(sat_mask)
        dates_time_series_mask, _ = CreateSuperimposeApplication({
            "inr": self.ref_image,
            "inm": dates_time_series_mask,
            "interpolator": "nn",
            "out": times_series_mask,
            "ram": str(ram)
        })
    return dates_time_series_mask, app_dep, len(date_data)
def get_features(self, ram=128):
    """Build the features pipeline (time series + optional features).

    Starts from the gap-filled time series (or the raw time series when
    ``self.enable_gapfilling`` is False), optionally writes it to disk,
    then appends spectral features (NDVI-like indices via the iota2
    feature-extraction application) and, when
    ``self.hand_features_flag`` is set, user-defined features.

    Parameters
    ----------
    ram : int
        RAM (MB) allocated to the OTB applications

    Return
    ------
    tuple
        ((features OTB application, dependency applications), list of
        feature labels)

    Raises
    ------
    Exception
        if features are requested but B4, B8 or B11 is missing from the
        extracted bands
    """
    import os
    import multiprocessing as mp
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import computeUserFeatures
    from iota2.Common.OtbAppBank import CreateIota2FeatureExtractionApplication
    from iota2.Common.OtbAppBank import getInputParameterOutput
    from iota2.Common.FileUtils import ensure_dir
    from iota2.Common.OtbAppBank import executeApp

    features_dir = os.path.join(self.features_dir, "tmp")
    ensure_dir(features_dir, raise_exe=False)
    features_out = os.path.join(features_dir, self.features_names)

    # ~ features = self.cfg_IOTA2.getParam("GlobChain", "features")
    # ~ enable_gapFilling = self.cfg_IOTA2.getParam("GlobChain",
    # ~ "useGapFilling")
    # ~ hand_features_flag = self.cfg_IOTA2.getParam('GlobChain',
    # ~ 'useAdditionalFeatures')

    # input: gap-filled series by default, raw series otherwise
    (in_stack,
     in_stack_dep), in_stack_features_labels = self.get_time_series_gapfilling()
    _, dates_enabled = self.write_interpolation_dates_file()
    if not self.enable_gapfilling:
        (in_stack,
         in_stack_dep), in_stack_features_labels = self.get_time_series()
        _, dates_enabled = self.write_dates_file()

    if self.write_outputs_flag is False:
        # keep the pipeline in memory
        in_stack.Execute()
    else:
        # materialize the stack on disk (in a child process so OTB
        # releases resources), then replace the app by the raster path
        in_stack_raster = in_stack.GetParameterValue(
            getInputParameterOutput(in_stack))
        if not os.path.exists(in_stack_raster):
            # in_stack.ExecuteAndWriteOutput()
            multi_proc = mp.Process(target=executeApp, args=[in_stack])
            multi_proc.start()
            multi_proc.join()
        if os.path.exists(in_stack_raster):
            in_stack = in_stack_raster
    # output
    app_dep = []
    if self.hand_features_flag:
        # ~ hand_features = self.cfg_IOTA2.getParam("Sentinel_2",
        # ~ "additionalFeatures")
        # number of bands per date in the stack
        comp = len(
            self.stack_band_position) if not self.extracted_bands else len(
                self.extracted_bands)
        (user_date_features, fields_userfeat, user_feat_date,
         stack) = computeUserFeatures(in_stack, dates_enabled, comp,
                                      self.hand_features.split(","))
        user_date_features.Execute()
        app_dep.append([user_date_features, user_feat_date, stack])

    if self.features:
        bands_avail = self.stack_band_position
        if self.extracted_bands:
            bands_avail = [
                band_name for band_name, _ in self.extracted_bands
            ]
            # check mandatory bands
            if "B4" not in bands_avail:
                raise Exception(
                    "red band (B4) is needed to compute features")
            if "B8" not in bands_avail:
                raise Exception(
                    "nir band (B8) is needed to compute features")
            if "B11" not in bands_avail:
                raise Exception(
                    "swir band (B11) is needed to compute features")
        # band indices are 1-based for the OTB application
        feat_parameters = {
            "in": in_stack,
            "out": features_out,
            "comp": len(bands_avail),
            "red": bands_avail.index("B4") + 1,
            "nir": bands_avail.index("B8") + 1,
            "swir": bands_avail.index("B11") + 1,
            "copyinput": self.copy_input,
            "relrefl": self.rel_refl,
            "keepduplicates": self.keep_dupl,
            "acorfeat": self.acorfeat,
            "pixType": "int16",
            "ram": str(ram)
        }
        features_app = CreateIota2FeatureExtractionApplication(
            feat_parameters)
        if self.copy_input is False:
            # input bands are not forwarded to the output
            in_stack_features_labels = []
        features_labels = (
            in_stack_features_labels +
            self.get_features_labels(dates_enabled, self.rel_refl,
                                     self.keep_dupl, self.copy_input))
    else:
        features_app = in_stack
        features_labels = in_stack_features_labels

    app_dep.append([in_stack, in_stack_dep])

    if self.hand_features_flag:
        # append the user-defined features to the output stack
        features_app.Execute()
        app_dep.append(features_app)
        features_app = CreateConcatenateImagesApplication({
            "il": [features_app, user_date_features],
            "out": features_out,
            "ram": str(ram)
        })
        features_labels += fields_userfeat
    return (features_app, app_dep), features_labels