Example #1
def get_tile_raster_footprint(tile_name: str, sensor_path: str,
                              proj_epsg_t: int) -> Union[None, ogr.Geometry]:
    """from a sensor path and a tile's name, get the tile's envelope
       as an ogr geometry
    """
    raster_ref = None
    geom_raster_envelope = None

    ref_path = os.path.join(sensor_path, tile_name)

    raster_ref_list = FileSearch_AND(ref_path, True, ".jp2") or FileSearch_AND(
        ref_path, True, ".tif") or FileSearch_AND(ref_path, True, ".tiff")
    for raster_ref in raster_ref_list:
        if "STACK.tif" not in raster_ref:
            break
    if raster_ref:
        proj_epsg_o = getRasterProjectionEPSG(raster_ref)
        min_x, max_x, min_y, max_y = getRasterExtent(raster_ref)
        geom_raster_envelope = extent_to_geom(min_x,
                                              max_x,
                                              min_y,
                                              max_y,
                                              src_epsg=proj_epsg_o,
                                              tgt_epsg=proj_epsg_t)
    return geom_raster_envelope
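A minimal usage sketch for the function above. The tile name, archive path and target EPSG code are illustrative assumptions, not values taken from iota2; the function returns None when no .jp2/.tif/.tiff reference raster is found under the tile directory.

# hypothetical call; paths and EPSG code are placeholders
footprint = get_tile_raster_footprint(tile_name="T31TCJ",
                                      sensor_path="/data/S2_L2A",
                                      proj_epsg_t=2154)
if footprint is not None:
    print(footprint.ExportToWkt())  # envelope as WKT, in the target projection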
Example #2
def autoContext_classification_param(iota2_directory, data_field):
    """
    Parameters
    ----------
    iota2_run_dir : string
        path to iota² output path
    """
    import re
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import getListTileFromModel
    from iota2.Common.FileUtils import getFieldElement

    models_description = os.path.join(iota2_directory, "config_model",
                                      "configModel.cfg")
    models_directory = os.path.join(iota2_directory, "model")
    sample_sel_directory = os.path.join(iota2_directory, "samplesSelection")

    parameters = []
    all_models = sorted(os.listdir(models_directory))
    for model in all_models:
        model_name = model.split("_")[1]
        seed_num = model.split("_")[-1]
        tiles = sorted(getListTileFromModel(model_name, models_description))
        #~ samples_region_1f1_seed_0.shp
        model_sample_sel = FileSearch_AND(
            sample_sel_directory, True,
            "samples_region_{}_seed_{}.shp".format(model_name, seed_num))[0]
        labels = getFieldElement(model_sample_sel,
                                 driverName="ESRI Shapefile",
                                 field=data_field,
                                 mode="unique")
        models = FileSearch_AND(os.path.join(models_directory, model), True,
                                ".rf")
        for tile in tiles:
            tile_mask = FileSearch_AND(
                os.path.join(iota2_directory, "shapeRegion"), True,
                "{}_{}.tif".format(model_name.split("f")[0], tile))[0]
            tile_seg = FileSearch_AND(
                os.path.join(iota2_directory, "features", tile, "tmp"), True,
                "SLIC_{}.tif".format(tile))[0]
            parameters.append({
                "model_name": model_name,
                "seed_num": seed_num,
                "tile": tile,
                "tile_segmentation": tile_seg,
                "tile_mask": tile_mask,
                "labels_list": labels,
                "model_list": sorted(
                    models,
                    key=lambda x: int(re.findall(r"\d", os.path.basename(x))[0]))
            })
    return parameters
Example #3
    def get_date_dir(self, date_dir, size):
        """
        get date dir
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND
        if size == 10:
            target_dir, _ = os.path.split(
                FileSearch_AND(date_dir, True, "10m.jp2")[0])
        elif size == 20:
            target_dir, _ = os.path.split(
                FileSearch_AND(date_dir, True, "B02_20m.jp2")[0])
        else:
            raise Exception("size not in [10, 20]")
        return target_dir
Example #4
def get_ordered_learning_samples(learning_directory: str) -> List[str]:
    """
    scan learning directory and return a list of files ordered considering
    model and seed

    Parameters
    ----------
    learning_directory : str
        path to the learning directory

    Return
    ------
    list
        list of paths
    """
    import operator
    from iota2.Common.FileUtils import FileSearch_AND

    tile_position = 0
    model_position = 2
    seed_position = 3

    learning_files = FileSearch_AND(learning_directory, False,
                                    "_Samples_learn.sqlite")
    files_indexed = [
        (c_file.split("_")[model_position], c_file.split("_")[tile_position],
         int(c_file.split("_")[seed_position].replace("seed", "")),
         os.path.join(learning_directory, "{}.sqlite".format(c_file)))
        for c_file in learning_files
    ]
    files_indexed_sorted = sorted(files_indexed,
                                  key=operator.itemgetter(0, 1, 2))
    return [learning_file for _, _, _, learning_file in files_indexed_sorted]
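A short illustration of the filename layout that the index positions above assume. The basename below is hypothetical; it only shows how tile, model and seed are recovered from the tokens of a name matching "_Samples_learn.sqlite".

# hypothetical basename as returned by FileSearch_AND(learning_directory, False, ...)
c_file = "T31TCJ_region_1_seed0_Samples_learn"
parts = c_file.split("_")                 # ['T31TCJ', 'region', '1', 'seed0', 'Samples', 'learn']
tile = parts[0]                           # 'T31TCJ'  (tile_position = 0)
model = parts[2]                          # '1'       (model_position = 2)
seed = int(parts[3].replace("seed", ""))  # 0         (seed_position = 3)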
Example #5
    def get_time_series_masks(self, ram=128, logger=LOGGER):
        """
        """
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import FileSearch_AND

        time_series_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(time_series_dir, raise_exe=False)
        times_series_mask = os.path.join(time_series_dir,
                                         self.time_series_masks_name)

        # check patterns
        for pattern in self.data_type:
            user_feature = FileSearch_AND(self.tile_directory, True, pattern)
            if not user_feature:
                msg = "WARNING : '{}' not found in {}".format(
                    pattern, self.tile_directory)
                logger.error(msg)
                raise Exception(msg)
        nb_patterns = len(self.data_type)
        masks = []
        app_dep = []
        for _ in range(nb_patterns):
            dummy_mask, _ = self.footprint(data_value=0)
            dummy_mask.Execute()
            app_dep.append(dummy_mask)
            masks.append(dummy_mask)
        masks_stack = CreateConcatenateImagesApplication({
            "il": masks,
            "out": times_series_mask,
            "ram": str(ram)
        })

        return masks_stack, app_dep, nb_patterns
Example #6
    def footprint(self, ram=128):
        """
        footprint
        """
        import os
        import glob
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.OtbAppBank import CreateBandMathApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import FileSearch_AND

        footprint_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(footprint_dir, raise_exe=False)
        footprint_out = os.path.join(footprint_dir, self.footprint_name)

        input_dates = [
            os.path.join(self.tile_directory, cdir)
            for cdir in os.listdir(self.tile_directory)
        ]
        input_dates = self.sort_dates_directories(input_dates)

        # get date's footprint
        date_edge = []
        for date_dir in input_dates:
            date_edge.append(
                glob.glob(
                    os.path.join(
                        date_dir, f"{self.struct_path_masks}"
                        f"{list(self.masks_rules.keys())[self.border_pos]}"))
                [0])

        self.generate_raster_ref(date_edge[0])

        # flag pixels whose edge-mask value is even, then keep (1) any pixel
        # flagged for at least one date
        expr = [
            f"(im{i+1}b1/2==rint(im{i+1}b1/2))" for i in range(len(date_edge))
        ]
        expr = f"{'+'.join(expr)}>0?1:0"
        masks_rules = CreateBandMathApplication({
            "il": date_edge,
            "ram": str(ram),
            "exp": expr
        })
        masks_rules.Execute()
        app_dep = [masks_rules]

        reference_raster = self.ref_image
        if self.vhr_path.lower() != "none":
            reference_raster = FileSearch_AND(input_dates[0], True,
                                              self.data_type, "COREG",
                                              ".TIF")[0]

        superimp, _ = CreateSuperimposeApplication({
            "inr": reference_raster,
            "inm": masks_rules,
            "out": footprint_out,
            "pixType": "uint8",
            "ram": str(ram)
        })
        return superimp, app_dep
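For reference, the BandMath expression built above marks a pixel as valid (1) when its edge-mask value is even for at least one date. A small sketch of the string generated for two hypothetical date masks:

date_edge = ["edge_date1.tif", "edge_date2.tif"]   # hypothetical mask rasters
expr = [f"(im{i+1}b1/2==rint(im{i+1}b1/2))" for i in range(len(date_edge))]
expr = f"{'+'.join(expr)}>0?1:0"
# expr == "(im1b1/2==rint(im1b1/2))+(im2b1/2==rint(im2b1/2))>0?1:0"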
Example #7
def check_sqlite_db(i2_output_path):
    """check if every sqlite database could be open
    """
    # explicit call to the undecorate function thanks to __wrapped__
    return [
        ServiceError.sqliteCorrupted(elem) for elem in
        FileSearch_AND.__wrapped__(i2_output_path, True, "sqlite-journal")
    ]
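The __wrapped__ attribute used above is the hook set by functools.wraps on a wrapper, which is what lets this call bypass the decorator applied to FileSearch_AND. A self-contained sketch of the mechanism; the logged decorator and search function below are illustrative, not part of iota2:

import functools

def logged(func):
    @functools.wraps(func)               # stores the original callable as wrapper.__wrapped__
    def wrapper(*args, **kwargs):
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)
    return wrapper

@logged
def search(directory, pattern):
    return [f"{directory}/{pattern}"]

search("/tmp", "file.sqlite")              # goes through the decorator
search.__wrapped__("/tmp", "file.sqlite")  # calls the undecorated function directly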
Example #8
    def build_stack_date_name(self, date_dir):
        """build stack date name
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND
        _, b2_name = os.path.split(
            FileSearch_AND(date_dir, True, "FRE_B2.tif")[0])
        return b2_name.replace("{}_B2.tif".format(self.data_type),
                               "{}_{}.tif".format(self.data_type, self.suffix))
Example #9
    def build_date_name(self, date_dir, suffix):
        """
        build date name
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND
        _, b2_name = os.path.split(
            FileSearch_AND(date_dir, True, "B02_10m.jp2")[0])
        return b2_name.replace("B02_10m.jp2", "{}.tif".format(suffix))
Example #10
    def build_stack_date_name(self, date_dir):
        """
        build stack date name
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND
        _, b2_name = os.path.split(
            FileSearch_AND(date_dir, True, "FRC_B2.tif")[0])
        return b2_name.replace("FRC_B2.tif", f"FRC_{self.suffix}.tif")
Example #11
    def get_available_dates_masks(self):
        """
        return sorted available masks
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND

        masks = sorted(FileSearch_AND(self.output_preprocess_directory, True,
                                      f"{self.masks_date_suffix}.tif"),
                       key=lambda x: int(
                           os.path.basename(x).split("_")[self.date_position].
                           split("-")[0]))
        return masks
Example #12
    def get_available_dates_masks(self):
        """
        return sorted available masks
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND
        pattern = "{}.tif".format(self.masks_date_suffix)
        if self.vhr_path.lower() != "none":
            pattern = "{}_COREG.tif".format(self.suffix)
        masks = sorted(FileSearch_AND(self.output_preprocess_directory, True,
                                      pattern),
                       key=lambda x: os.path.basename(x).split("_")[
                           self.date_position].split("-")[0])
        return masks
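A hedged illustration of the sort key used above. The product basename and the value of date_position are assumptions made only for this example; the key isolates the acquisition date from the first token of the date field.

import os
# hypothetical THEIA-style basename; the real layout depends on the sensor class
raster = "/out/SENTINEL2A_20180511-105852-013_L2A_T31TCJ_FRE_STACK.tif"
date_position = 1                                                  # assumed for this sketch
date_token = os.path.basename(raster).split("_")[date_position]   # '20180511-105852-013'
sort_key = date_token.split("-")[0]                                # '20180511'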
Example #13
    def footprint(self, ram=128, data_value=1):
        """get footprint
        """
        from gdal import Warp
        from osgeo.gdalconst import GDT_Byte
        from iota2.Common.FileUtils import FileSearch_AND
        from iota2.Common.OtbAppBank import CreateBandMathApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG
        from iota2.Common.FileUtils import getRasterResolution

        footprint_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(footprint_dir, raise_exe=False)
        footprint_out = os.path.join(footprint_dir, self.footprint_name)

        user_feature = FileSearch_AND(self.tile_directory, True,
                                      self.data_type[0])

        # tile reference image generation
        base_ref = user_feature[0]
        LOGGER.info("reference image generation {} from {}".format(
            self.ref_image, base_ref))
        ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
        base_ref_projection = getRasterProjectionEPSG(base_ref)
        base_ref_res_x, _ = getRasterResolution(base_ref)
        if not os.path.exists(self.ref_image):
            Warp(self.ref_image,
                 base_ref,
                 multithread=True,
                 format="GTiff",
                 xRes=base_ref_res_x,
                 yRes=base_ref_res_x,
                 outputType=GDT_Byte,
                 srcSRS="EPSG:{}".format(base_ref_projection),
                 dstSRS="EPSG:{}".format(self.target_proj))

        # user features must not contain NODATA
        # -> "exp": 'data_value' means every pixel holds valid data
        footprint = CreateBandMathApplication({
            "il": self.ref_image,
            "out": footprint_out,
            "exp": str(data_value),
            "pixType": "uint8",
            "ram": str(ram)
        })

        # needed to travel through iota2's library
        app_dep = []

        return footprint, app_dep
Example #14
    def get_available_dates(self):
        """
        return sorted available dates
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND

        stacks = sorted(
            FileSearch_AND(self.output_preprocess_directory, True,
                           "{}.tif".format(self.suffix)),
            key=lambda x: int(
                os.path.basename(x).split("_")[self.date_position].split("-")[
                    0]),
        )
        return stacks
Example #15
    def footprint(self, ram=128):
        """
        compute footprint of images
        """
        import os
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.OtbAppBank import CreateBandMathApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import FileSearch_AND

        footprint_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(footprint_dir, raise_exe=False)
        footprint_out = os.path.join(footprint_dir, self.footprint_name)

        input_dates = [
            os.path.join(self.tile_directory, cdir)
            for cdir in os.listdir(self.tile_directory)
        ]
        input_dates = self.sort_dates_directories(input_dates)
        all_scl = []
        for date_dir in input_dates:
            r20m_dir = self.get_date_dir(date_dir, 20)
            scl = FileSearch_AND(r20m_dir, True, self.scene_classif)[0]
            all_scl.append(scl)
        sum_scl = "+".join(
            ["im{}b1".format(i + 1) for i in range(len(all_scl))])
        edge = CreateBandMathApplication({
            "il": all_scl,
            "exp": "{}==0?0:1".format(sum_scl)
        })
        edge.Execute()
        app_dep = [edge]

        # superimpose footprint
        reference_raster = self.ref_image
        if self.vhr_path.lower() != "none":
            reference_raster = self.get_available_dates()[0]
        superimp, _ = CreateSuperimposeApplication({
            "inr": reference_raster,
            "inm": edge,
            "out": footprint_out,
            "pixType": "uint8",
            "ram": str(ram)
        })
        # needed to travel through iota2's library
        app_dep.append(_)

        return superimp, app_dep
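For clarity, the BandMath expression built above keeps a pixel (value 1) as soon as the sum of the SCL rasters is non-zero, i.e. at least one date carries data there. A sketch of the string generated for three hypothetical SCL rasters:

all_scl = ["SCL_date1.tif", "SCL_date2.tif", "SCL_date3.tif"]   # hypothetical inputs
sum_scl = "+".join(["im{}b1".format(i + 1) for i in range(len(all_scl))])
expression = "{}==0?0:1".format(sum_scl)
# expression == "im1b1+im2b1+im3b1==0?0:1"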
Example #16
    def get_available_dates(self):
        """
        return sorted available dates
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND

        pattern = f"{self.suffix}.tif"
        if self.vhr_path.lower() != "none":
            pattern = f"{self.suffix}_COREG.tif"

        stacks = sorted(FileSearch_AND(self.output_preprocess_directory, True,
                                       pattern),
                        key=lambda x: os.path.basename(x).split("_")[
                            self.date_position].split("-")[0])
        return stacks
Example #17
    def get_available_dates(self):
        """
        return sorted available dates
        """
        import os
        from iota2.Common.FileUtils import FileSearch_AND
        target_folder = self.tile_directory
        if self.output_preprocess_directory:
            target_folder = self.output_preprocess_directory

        pattern = "{}.tif".format(self.suffix)
        if self.vhr_path.lower() != "none":
            pattern = "{}_COREG.tif".format(self.suffix)

        stacks = sorted(FileSearch_AND(target_folder, True, pattern),
                        key=lambda x: os.path.basename(x).split("_")[
                            self.date_position].split("T")[0])
        return stacks
Example #18
    def step_inputs(self):
        """
        Return
        ------
            the return can be an iterable or a callable
        """
        from iota2.Common.FileUtils import FileSearch_AND
        sample_sel_directory = os.path.join(
            SCF.serviceConfigFile(self.cfg).getParam('chain', 'outputPath'),
            "samplesSelection")
        sampled_vectors = FileSearch_AND(sample_sel_directory, True,
                                         "selection.sqlite")
        tiles = []
        for sampled_vector in sampled_vectors:
            tile_name = os.path.splitext(os.path.basename(
                sampled_vector))[0].split("_")[self.tile_name_pos]
            if tile_name not in tiles and tile_name != "samples":
                tiles.append(tile_name)
        tiles = sorted(tiles)
        return tiles
Example #19
    def footprint(self, ram=128):
        """get sentinel_1 footprint
        """
        from iota2.Common.OtbAppBank import CreateBandMathApplication
        from iota2.Common.FileUtils import FileSearch_AND
        s1_border_masks = FileSearch_AND(self.output_processing, True,
                                         self.mask_pattern)

        sum_mask = "+".join(
            ["im{}b1".format(i + 1) for i in range(len(s1_border_masks))])
        expression = "{}=={}?0:1".format(sum_mask, len(s1_border_masks))
        raster_footprint = os.path.join(self.features_dir, "tmp",
                                        self.footprint_name)
        footprint_app = CreateBandMathApplication({
            "il": s1_border_masks,
            "out": raster_footprint,
            "exp": expression,
            "ram": str(ram)
        })
        footprint_app_dep = []
        return footprint_app, footprint_app_dep
Example #20
    def test_slic(self):
        """non-regression test, check if SLIC could be performed
        """
        from iota2.Common import IOTA2Directory
        from iota2.Common import ServiceConfigFile as SCF
        from iota2.Segmentation import segmentation
        from iota2.Common.FileUtils import FileSearch_AND
        from iota2.Sensors.Sensors_container import sensors_container

        # config file
        config_path_test = os.path.join(self.test_working_directory,
                                        "Config_TEST.cfg")
        test_path = self.generate_cfg_file(self.config_test, config_path_test)

        IOTA2Directory.generate_directories(test_path, check_inputs=False)

        slic_working_dir = os.path.join(self.test_working_directory,
                                        "slic_tmp")
        iota2_dico = SCF.iota2_parameters(
            config_path_test).get_sensors_parameters(self.tile_name)
        sensors = sensors_container(self.tile_name, None,
                                    self.test_working_directory, **iota2_dico)
        sensors.sensors_preprocess()

        # Launch test
        segmentation.slicSegmentation(self.tile_name,
                                      test_path,
                                      iota2_dico,
                                      ram=128,
                                      working_dir=slic_working_dir,
                                      force_spw=1)

        # as the SLIC algorithm involves random variables, the raster's content
        # cannot be tested
        self.assertTrue(len(
            FileSearch_AND(
                os.path.join(test_path, "features", self.tile_name, "tmp"),
                True, "SLIC_{}".format(self.tile_name))) == 1,
                        msg="SLIC algorithm failed")
Example #21
    def test_sk_cross_validation(self):
        """test cross validation
        """
        import shutil
        from iota2.Learning.TrainSkLearn import sk_learn
        from iota2.Common.FileUtils import FileSearch_AND

        _, db_file_name = os.path.split(self.features_dataBase)

        features_db_test = os.path.join(self.test_working_directory,
                                        db_file_name)
        shutil.copy(self.features_dataBase, features_db_test)

        test_model_path = os.path.join(self.test_working_directory,
                                       "test_model.rf")
        sk_learn(
            dataset_path=features_db_test,
            features_labels=["reduced_{}".format(cpt) for cpt in range(138)],
            model_path=test_model_path,
            data_field="code",
            sk_model_name="RandomForestClassifier",
            cv_parameters={'n_estimators': [50, 100]},
            min_samples_split=25)

        # asserts
        self.assertTrue(os.path.exists(test_model_path))
        test_cross_val_results = FileSearch_AND(self.test_working_directory,
                                                True, "cross_val_param.cv")[0]
        with open(test_cross_val_results, "r") as cross_val_f:
            test_cross_val = [line.rstrip() for line in cross_val_f]

        test_cv_val = all([
            val_to_find in test_cross_val
            for val_to_find in self.ref_cross_validation
        ])
        self.assertTrue(test_cv_val, msg="cross validation failed")
Example #22
def sk_classifications_to_merge(iota2_classif_directory: str
                                ) -> List[Dict[str, Union[str, List[str]]]]:
    """feed function merge_sk_classifications

    Parameters
    ----------
    iota2_classif_directory : str
        iota2's classification directory
    """
    import os
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import sortByFirstElem

    model_pos_classif = 3
    seed_pos_classif = 5
    tile_pos_classif = 1

    model_pos_confidence = 2
    seed_pos_confidence = 5
    tile_pos_confidence = 0

    rasters_to_merge = []

    classifications = FileSearch_AND(iota2_classif_directory, True, "Classif",
                                     "_model_", "_seed_", "_SUBREGION_",
                                     ".tif")
    classif_to_merge = []
    for classification in classifications:
        model = os.path.basename(classification).split("_")[model_pos_classif]
        seed = os.path.basename(classification).split("_")[seed_pos_classif]
        tile_name = os.path.basename(classification).split(
            "_")[tile_pos_classif]
        suffix = ""
        if "_SAR_SUBREGION_" in classification:
            suffix = "_SAR_"
        classif_to_merge.append(
            ((model, seed, tile_name, suffix), classification))

    classif_to_merge = sortByFirstElem(classif_to_merge)

    confidences = FileSearch_AND(iota2_classif_directory, True, "_model_",
                                 "confidence", "_seed_", "_SUBREGION_", ".tif")
    confidences_to_merge = []
    for confidence in confidences:
        model = os.path.basename(confidence).split("_")[model_pos_confidence]
        seed = os.path.basename(confidence).split("_")[seed_pos_confidence]
        tile_name = os.path.basename(confidence).split(
            "_")[tile_pos_confidence]
        suffix = ""
        if "_SAR_SUBREGION_" in confidence:
            suffix = "_SAR_"
        confidences_to_merge.append(
            ((model, seed, tile_name, suffix), confidence))
    confidences_to_merge = sortByFirstElem(confidences_to_merge)

    if len(classif_to_merge) != len(confidences_to_merge):
        raise ValueError(
            "number of classifications to merge ({}) differs from the number "
            "of confidences to merge ({})".format(len(classif_to_merge),
                                                  len(confidences_to_merge)))
    for (model_name, seed_num, tile_name,
         suffix), classif_list in classif_to_merge:
        output_dir, _ = os.path.split(classif_list[0])
        if suffix == "":
            classif_name = "_".join([
                "Classif", tile_name, "model", model_name, "seed", seed_num
            ]) + ".tif"
        else:
            classif_name = "_".join([
                "Classif", tile_name, "model", model_name, "seed", seed_num,
                "SAR"
            ]) + ".tif"
        rasters_to_merge.append({
            "rasters_list": classif_list,
            "merge_path": os.path.join(output_dir, classif_name)
        })
    for (model_name, seed_num, tile_name,
         suffix), confidence_list in confidences_to_merge:
        output_dir, _ = os.path.split(confidence_list[0])
        if suffix == "":
            confidence_name = "_".join([
                tile_name, "model", model_name, "confidence", "seed", seed_num
            ]) + ".tif"
        else:
            confidence_name = "_".join([
                tile_name, "model", model_name, "confidence", "seed", seed_num,
                "SAR"
            ]) + ".tif"
        rasters_to_merge.append({
            "rasters_list": confidence_list,
            "merge_path": os.path.join(output_dir, confidence_name)
        })
    return rasters_to_merge
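A short sketch of the filename conventions that the split positions above rely on. Both paths below are hypothetical; the assertions only check that the token positions match the constants defined at the top of the function.

import os

# classification raster: model at index 3, seed at index 5, tile at index 1
classif = "/out/classif/Classif_T31TCJ_model_1_seed_0_SUBREGION_0.tif"
parts = os.path.basename(classif).split("_")
assert (parts[3], parts[5], parts[1]) == ("1", "0", "T31TCJ")

# confidence raster: model at index 2, seed at index 5, tile at index 0
confidence = "/out/classif/T31TCJ_model_1_confidence_seed_0_SUBREGION_0.tif"
parts = os.path.basename(confidence).split("_")
assert (parts[2], parts[5], parts[0]) == ("1", "0", "T31TCJ")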
Example #23
def get_learning_samples(
        learning_samples_dir: str,
        config_path: str) -> List[Dict[str, Union[str, List[str]]]]:
    """get sorted learning samples files from samples directory

    Parameters
    ----------
    learning_samples_dir : str
        path to iota2 output directory containing learning data base
    config_path : str
        path to iota2 configuration file

    Return
    ------
    list
        list of dictionaries
        example :
        get_learning_samples() = [{"learning_file": path to the learning file data base,
                                   "feat_labels": feature's labels,
                                   "model_path": output model path
                                  },
                                  ...
                                 ]
    """
    import operator
    from iota2.Common import ServiceConfigFile
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import getVectorFeatures

    sar_suffix = "SAR"

    ground_truth = ServiceConfigFile.serviceConfigFile(config_path).getParam(
        "chain", "groundTruth")
    region_field = ServiceConfigFile.serviceConfigFile(config_path).getParam(
        "chain", "regionField")
    sar_opt_post_fusion = ServiceConfigFile.serviceConfigFile(
        config_path).getParam("argTrain", "dempster_shafer_SAR_Opt_fusion")
    iota2_outputs = ServiceConfigFile.serviceConfigFile(config_path).getParam(
        "chain", "outputPath")
    iota2_models_dir = os.path.join(iota2_outputs, "model")

    parameters = []
    seed_pos = 3
    model_pos = 2
    learning_files = FileSearch_AND(learning_samples_dir, True,
                                    "Samples_region_", "_learn.sqlite")
    if sar_opt_post_fusion:
        learning_files_sar = FileSearch_AND(
            learning_samples_dir, True, "Samples_region_",
            "_learn_{}.sqlite".format(sar_suffix))
        learning_files += learning_files_sar

    learning_files_sorted = []
    output_model_files_sorted = []
    features_labels_sorted = []
    for learning_file in learning_files:
        seed = os.path.basename(learning_file).split("_")[seed_pos].replace(
            "seed", "")
        model = os.path.basename(learning_file).split("_")[model_pos]
        learning_files_sorted.append((seed, model, learning_file))

    learning_files_sorted = sorted(learning_files_sorted,
                                   key=operator.itemgetter(0, 1))

    for _, _, learning_file in learning_files_sorted:
        seed = os.path.basename(learning_file).split("_")[seed_pos].replace(
            "seed", "")
        model = os.path.basename(learning_file).split("_")[model_pos]
        model_name = "model_{}_seed_{}.txt".format(model, seed)
        if "{}.sqlite".format(sar_suffix) in learning_file:
            model_name = model_name.replace(".txt",
                                            "_{}.txt".format(sar_suffix))
        model_path = os.path.join(iota2_models_dir, model_name)
        output_model_files_sorted.append(model_path)
        features_labels_sorted.append(
            getVectorFeatures(ground_truth, region_field, learning_file))

    parameters = [{
        "learning_file": learning_file,
        "feat_labels": features_labels,
        "model_path": model_path
    } for (seed, model, learning_file), model_path, features_labels in zip(
        learning_files_sorted, output_model_files_sorted,
        features_labels_sorted)]
    return parameters
Example #24
    def get_features(self, ram=128, logger=LOGGER):
        """generate user features. Concatenates all of them
        """
        from gdal import Warp
        from osgeo.gdalconst import GDT_Byte
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.FileUtils import FileSearch_AND
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG
        from iota2.Common.FileUtils import getRasterResolution
        from iota2.Common.FileUtils import getRasterNbands

        features_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(features_dir, raise_exe=False)
        features_out = os.path.join(features_dir, self.features_names)

        user_features_found = []
        user_features_bands = []
        for pattern in self.data_type:
            user_feature = FileSearch_AND(self.tile_directory, True, pattern)
            if user_feature:
                user_features_bands.append(getRasterNbands(user_feature[0]))
                user_features_found.append(user_feature[0])
            else:
                msg = "WARNING : '{}' not found in {}".format(
                    pattern, self.tile_directory)
                logger.error(msg)
                raise Exception(msg)

        user_feat_stack = CreateConcatenateImagesApplication({
            "il": user_features_found,
            "ram": str(ram),
            "out": features_out
        })
        base_ref = user_features_found[0]
        base_ref_projection = getRasterProjectionEPSG(base_ref)
        if not os.path.exists(self.ref_image):
            base_ref_res_x, _ = getRasterResolution(base_ref)
            Warp(self.ref_image,
                 base_ref,
                 multithread=True,
                 format="GTiff",
                 xRes=base_ref_res_x,
                 yRes=base_ref_res_x,
                 outputType=GDT_Byte,
                 srcSRS="EPSG:{}".format(base_ref_projection),
                 dstSRS="EPSG:{}".format(self.target_proj))
        app_dep = []
        if int(base_ref_projection) != int(self.target_proj):
            user_feat_stack.Execute()
            app_dep.append(user_feat_stack)
            user_feat_stack, _ = CreateSuperimposeApplication({
                "inr": self.ref_image,
                "inm": user_feat_stack,
                "out": features_out,
                "ram": str(ram)
            })
        features_labels = [
            "{}_band_{}".format(pattern, band_num)
            for pattern, nb_bands in zip(self.data_type, user_features_bands)
            for band_num in range(nb_bands)
        ]
        return (user_feat_stack, app_dep), features_labels
Example #25
def train_autoContext_parameters(iota2_directory: str,
                                 regionField: str) -> List[Param]:
    """feed train_autoContext function

    Parameters
    ----------
    iota2_directory : string
        path to iota²'s running directory
    regionField : string
        region's field
    Return
    ------
    parameters : list
        list of dictionaries describing input parameters
    """
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import sortByFirstElem
    from iota2.Learning.TrainingCmd import config_model

    parameters = []

    pathToModelConfig = os.path.join(iota2_directory, "config_model",
                                     "configModel.cfg")
    configModel = config_model(iota2_directory, regionField)
    if not os.path.exists(pathToModelConfig):
        with open(pathToModelConfig, "w") as configFile:
            configFile.write(configModel)

    learningSamples_directory = os.path.join(iota2_directory,
                                             "learningSamples")
    tile_position = 0
    model_position = 2
    seed_position = 3

    learning_samples = FileSearch_AND(learningSamples_directory, False,
                                      "_Samples_learn.sqlite")

    learning_models = [
        ((c_file.split("_")[model_position],
          int(c_file.split("_")[seed_position].replace("seed", ""))), c_file)
        for c_file in learning_samples
    ]

    learning_models = sortByFirstElem(learning_models)

    for (model_name, seed_num), learning_files in learning_models:
        tiles = [
            learning_file.split("_")[tile_position]
            for learning_file in learning_files
        ]

        assert len(set(tiles)) == len(learning_files)

        tiles_slic = []
        for tile in tiles:
            tiles_slic.append(
                FileSearch_AND(
                    os.path.join(iota2_directory, "features", tile, "tmp"),
                    True, "SLIC", ".tif")[0])
        learning_files_path = [
            "{}.sqlite".format(
                os.path.join(learningSamples_directory, learning_file))
            for learning_file in learning_files
        ]
        SP_files_path = []
        for learning_file_path in learning_files_path:
            SP_file = learning_file_path.replace("learn.sqlite", "SP.sqlite")
            if not os.path.exists(SP_file):
                raise FileNotFoundError("{} not found".format(SP_file))
            SP_files_path.append(SP_file)

        parameters.append({
            "model_name": model_name,
            "seed": seed_num,
            "list_learning_samples": learning_files_path,
            "list_superPixel_samples": SP_files_path,
            "list_tiles": tiles,
            "list_slic": tiles_slic
        })
    return parameters
Example #26
    def get_features(self, ram=128, logger=LOGGER):
        """get sar features
        """
        import configparser
        from iota2.Common.FileUtils import getNbDateInTile, FileSearch_AND
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import generateSARFeat_dates
        from iota2.Common.OtbAppBank import getInputParameterOutput

        if self.use_gapfilling:
            (s1_data,
             dependancies), s1_labels = self.get_time_series_gapFilling(ram)
        else:
            (s1_data, dependancies), s1_labels = self.get_time_series(ram)
        config = configparser.ConfigParser()
        config.read(self.s1_cfg)

        sar_features_expr = None
        if config.has_option("Features", "expression"):
            sar_features_expr_cfg = config.get("Features", "expression")
            if not "none" in sar_features_expr_cfg.lower():
                sar_features_expr = sar_features_expr_cfg.split(",")

        dependancies = [dependancies]
        s1_features = []
        sar_time_series = {
            "asc": {
                "vv": {
                    "App": None,
                    "availDates": None
                },
                "vh": {
                    "App": None,
                    "availDates": None
                }
            },
            "des": {
                "vv": {
                    "App": None,
                    "availDates": None
                },
                "vh": {
                    "App": None,
                    "availDates": None
                }
            }
        }
        for sensor_mode, time_series_app in list(s1_data.items()):
            _, polarisation, orbit = sensor_mode.split("_")
            # inputs
            if self.write_outputs_flag is False:
                time_series_app.Execute()
            else:
                time_series_raster = time_series_app.GetParameterValue(
                    getInputParameterOutput(time_series_app))
                if not os.path.exists(time_series_raster):
                    time_series_app.ExecuteAndWriteOutput()
                if os.path.exists(time_series_raster):
                    time_series_app = time_series_raster

            sar_time_series[orbit.lower()][
                polarisation.lower()]["App"] = time_series_app

            s1_features.append(time_series_app)
            dependancies.append(time_series_app)
            if self.use_gapfilling:
                date_file = FileSearch_AND(
                    self.features_dir, True,
                    "{}_{}_dates_interpolation.txt".format(
                        polarisation.lower(), orbit.upper()))[0]
            else:
                tar_dir = os.path.join(config.get("Paths", "output"),
                                       self.tile_name[1:])
                date_file = FileSearch_AND(
                    tar_dir, True,
                    "{}_{}_dates_input.txt".format(polarisation.lower(),
                                                   orbit.upper()))[0]
            sar_time_series[orbit.lower()][
                polarisation.lower()]["availDates"] = getNbDateInTile(
                    date_file, display=False, raw_dates=True)
        features_labels = []
        for sensor_mode, features in list(s1_labels.items()):
            features_labels += features
        if sar_features_expr:
            sar_user_features_raster = os.path.join(
                self.features_dir, "tmp", self.user_sar_features_name)
            user_sar_features, user_sar_features_lab = generateSARFeat_dates(
                sar_features_expr, sar_time_series, sar_user_features_raster)
            if self.write_outputs_flag is False:
                user_sar_features.Execute()
            else:
                if not os.path.exists(sar_user_features_raster):
                    user_sar_features.ExecuteAndWriteOutput()
                if os.path.exists(sar_user_features_raster):
                    user_sar_features = sar_user_features_raster
            dependancies.append(user_sar_features)
            s1_features.append(user_sar_features)
            features_labels += user_sar_features_lab
        sar_features_raster = os.path.join(self.features_dir, "tmp",
                                           self.sar_features_name)
        sar_features = CreateConcatenateImagesApplication({
            "il": s1_features,
            "out": sar_features_raster,
            "ram": str(ram)
        })
        return (sar_features, dependancies), features_labels
Example #27
    def footprint(self, ram=128):
        """
        compute the footprint
        """
        import os
        import glob
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.OtbAppBank import CreateBandMathApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import FileSearch_AND

        footprint_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(footprint_dir, raise_exe=False)
        footprint_out = os.path.join(footprint_dir, self.footprint_name)

        input_dates = [
            os.path.join(self.tile_directory, cdir)
            for cdir in os.listdir(self.tile_directory)
        ]
        input_dates = self.sort_dates_directories(input_dates)

        # get date's footprint
        date_edge = []
        for date_dir in input_dates:
            date_edge.append(
                glob.glob(
                    os.path.join(
                        date_dir,
                        "{}{}".format(
                            self.struct_path_masks,
                            list(self.masks_rules.keys())[self.border_pos],
                        ),
                    ))[0])

        expr = " || ".join("1 - im{}b1".format(i + 1)
                           for i in range(len(date_edge)))
        l8_border = CreateBandMathApplication({
            "il": date_edge,
            "exp": expr,
            "ram": str(ram)
        })
        l8_border.Execute()

        reference_raster = self.ref_image

        if self.vhr_path.lower() != "none":
            reference_raster = FileSearch_AND(input_dates[0], True,
                                              self.data_type, "COREG",
                                              ".TIF")[0]

        # superimpose footprint
        superimp, _ = CreateSuperimposeApplication({
            "inr": reference_raster,
            "inm": l8_border,
            "out": footprint_out,
            "pixType": "uint8",
            "ram": str(ram),
        })

        # needed to travel through iota2's library
        app_dep = [l8_border, _]

        return superimp, app_dep
Example #28
    def preprocess_date(self,
                        date_dir,
                        out_prepro,
                        working_dir=None,
                        ram=128,
                        logger=LOGGER):
        """
        Preprocess each date
        """
        import os
        import shutil
        from gdal import Warp
        import multiprocessing as mp
        from osgeo.gdalconst import GDT_Byte
        from collections import OrderedDict
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG
        from iota2.Common.FileUtils import FileSearch_AND
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.OtbAppBank import executeApp
        # manage directories
        date_stack_name = self.build_stack_date_name(date_dir)
        logger.debug(f"preprocessing {date_dir}")
        out_stack = os.path.join(date_dir, date_stack_name)
        if out_prepro:
            _, date_dir_name = os.path.split(date_dir)
            out_dir = os.path.join(out_prepro, date_dir_name)
            if not os.path.exists(out_dir):
                try:
                    os.mkdir(out_dir)
                except OSError:
                    logger.warning(f"{out_dir} already exists")
            out_stack = os.path.join(out_dir, date_stack_name)

        out_stack_processing = out_stack
        if working_dir:
            out_stack_processing = os.path.join(working_dir, date_stack_name)

        # get bands
        date_bands = [
            FileSearch_AND(date_dir, True,
                           "{}_{}.tif".format(self.data_type, bands_name))[0]
            for bands_name in self.stack_band_position
        ]

        # tile reference image generation
        base_ref = date_bands[0]
        ensure_dir(os.path.dirname(self.ref_image), raise_exe=False)
        base_ref_projection = getRasterProjectionEPSG(base_ref)

        if not os.path.exists(self.ref_image):
            logger.info(
                f"reference image generation {self.ref_image} from {base_ref}")
            Warp(self.ref_image,
                 base_ref,
                 multithread=True,
                 format="GTiff",
                 xRes=self.native_res,
                 yRes=self.native_res,
                 outputType=GDT_Byte,
                 srcSRS="EPSG:{}".format(base_ref_projection),
                 dstSRS="EPSG:{}".format(self.target_proj))

        # reproject / resample
        bands_proj = OrderedDict()
        all_reproj = []
        for band, band_name in zip(date_bands, self.stack_band_position):
            superimp, _ = CreateSuperimposeApplication({
                "inr": self.ref_image,
                "inm": band,
                "ram": str(ram)
            })
            bands_proj[band_name] = superimp
            all_reproj.append(superimp)

        if self.write_dates_stack:
            for reproj in all_reproj:
                reproj.Execute()
            date_stack = CreateConcatenateImagesApplication({
                "il": all_reproj,
                "ram": str(ram),
                "pixType": "int16",
                "out": out_stack_processing
            })
            same_proj = False
            if os.path.exists(out_stack):
                same_proj = int(getRasterProjectionEPSG(out_stack)) == int(
                    self.target_proj)

            if not os.path.exists(out_stack) or same_proj is False:
                # ~ date_stack.ExecuteAndWriteOutput()
                multi_proc = mp.Process(target=executeApp, args=[date_stack])
                multi_proc.start()
                multi_proc.join()
                if working_dir:
                    shutil.copy(out_stack_processing, out_stack)
                    os.remove(out_stack_processing)
        return bands_proj if self.write_dates_stack is False else out_stack
Example #29
def get_class_by_models(iota2_samples_dir, data_field, model=None):
    """ inform which class will be used to by models

    Parameters
    ----------
    iota2_samples_dir : string
        path to the directory containing samples dedicated to learn models

    data_field : string
        field which contains labels in vector file

    model : string, optional
        path to a model file; if provided, classes are read from the model
        files found in the same directory

    Return
    ------
    dic[model][seed]

    Example
    -------
    >>> dico_models = get_class_by_models("/somewhere/learningSamples", "code")
    >>> print(dico_models["1"][0])
    >>> [11, 12, 31]
    """
    from iota2.Common.FileUtils import FileSearch_AND
    from iota2.Common.FileUtils import getFieldElement

    class_models = {}
    if model is not None:
        modelpath = os.path.dirname(model)
        models_files = FileSearch_AND(modelpath, True, "model", "seed", ".txt")

        for model_file in models_files:
            model_name = os.path.splitext(
                os.path.basename(model_file))[0].split("_")[1]
            class_models[model_name] = {}
            seed_number = int(
                os.path.splitext(
                    os.path.basename(model_file))[0].split("_")[3].replace(
                        ".txt", ""))
            classes = get_model_dictionnary(model_file)
            class_models[model_name][seed_number] = classes
    else:
        samples_files = FileSearch_AND(iota2_samples_dir, True,
                                       "Samples_region_", "_seed",
                                       "_learn.sqlite")

        for samples_file in samples_files:
            model_name = os.path.splitext(
                os.path.basename(samples_file))[0].split("_")[2]
            class_models[model_name] = {}
        for samples_file in samples_files:
            model_name = os.path.splitext(
                os.path.basename(samples_file))[0].split("_")[2]
            seed_number = int(
                os.path.splitext(
                    os.path.basename(samples_file))[0].split("_")[3].replace(
                        "seed", ""))
            class_models[model_name][seed_number] = sorted(
                getFieldElement(samples_file,
                                driverName="SQLite",
                                field=data_field.lower(),
                                mode="unique",
                                elemType="int"))
    return class_models
Example #30
    def get_time_series(self, ram=128):
        """
        TODO : be able of using a date interval
        Return
        ------
            list
                [(otb_Application, some otb's objects), time_series_labels]
                Functions dealing with otb's application instance has to
                returns every objects in the pipeline
        """
        import os
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG
        from iota2.Common.FileUtils import FileSearch_AND

        # needed to travel through iota2's library
        app_dep = []

        input_dates = [
            os.path.join(self.tile_directory, cdir)
            for cdir in os.listdir(self.tile_directory)
        ]
        input_dates = self.sort_dates_directories(input_dates)

        # get date's data
        date_data = []
        for date_dir in input_dates:
            l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                         ".TIF")[0]
            if self.vhr_path.lower() != "none":
                l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                             "COREG", ".TIF")[0]
            date_data.append(l5_old_date)

        time_series_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(time_series_dir, raise_exe=False)
        times_series_raster = os.path.join(time_series_dir,
                                           self.time_series_name)
        dates_time_series = CreateConcatenateImagesApplication({
            "il": date_data,
            "out": times_series_raster,
            "ram": str(ram)
        })
        _, dates_in = self.write_dates_file()

        # build labels
        features_labels = [
            f"{self.__class__.name}_{band_name}_{date}" for date in dates_in
            for band_name in self.stack_band_position
        ]

        # if not all bands must be used
        if self.extracted_bands:
            app_dep.append(dates_time_series)
            (dates_time_series,
             features_labels) = self.extract_bands_time_series(
                 dates_time_series, dates_in, len(self.stack_band_position),
                 self.extracted_bands, ram)
        origin_proj = getRasterProjectionEPSG(date_data[0])
        if int(origin_proj) != int(self.target_proj):
            dates_time_series.Execute()
            app_dep.append(dates_time_series)
            self.generate_raster_ref(date_data[0])
            # reproject the stacked time series onto the tile reference image
            dates_time_series, _ = CreateSuperimposeApplication({
                "inr": self.ref_image,
                "inm": dates_time_series,
                "out": times_series_raster,
                "ram": str(ram)
            })
        return (dates_time_series, app_dep), features_labels