Code example #1
    def get_features(self, ram=128, logger=LOGGER):
        """generate user features. Concatenates all of them
        """
        from gdal import Warp
        from osgeo.gdalconst import GDT_Byte
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.FileUtils import FileSearch_AND
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG
        from iota2.Common.FileUtils import getRasterResolution
        from iota2.Common.FileUtils import getRasterNbands

        features_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(features_dir, raise_exe=False)
        features_out = os.path.join(features_dir, self.features_names)

        user_features_found = []
        user_features_bands = []
        for pattern in self.data_type:
            user_feature = FileSearch_AND(self.tile_directory, True, pattern)
            if user_feature:
                user_features_bands.append(getRasterNbands(user_feature[0]))
                user_features_found.append(user_feature[0])
            else:
                msg = "WARNING : '{}' not found in {}".format(
                    pattern, self.tile_directory)
                logger.error(msg)
                raise Exception(msg)

        user_feat_stack = CreateConcatenateImagesApplication({
            "il": user_features_found,
            "ram": str(ram),
            "out": features_out
        })
        base_ref = user_features_found[0]
        base_ref_projection = getRasterProjectionEPSG(base_ref)
        if not os.path.exists(self.ref_image):
            base_ref_res_x, _ = getRasterResolution(base_ref)
            Warp(self.ref_image,
                 base_ref,
                 multithread=True,
                 format="GTiff",
                 xRes=base_ref_res_x,
                 yRes=base_ref_res_x,
                 outputType=GDT_Byte,
                 srcSRS="EPSG:{}".format(base_ref_projection),
                 dstSRS="EPSG:{}".format(self.target_proj))
        app_dep = []
        if int(base_ref_projection) != int(self.target_proj):
            user_feat_stack.Execute()
            app_dep.append(user_feat_stack)
            user_feat_stack, _ = CreateSuperimposeApplication({
                "inr": self.ref_image,
                "inm": user_feat_stack,
                "out": features_out,
                "ram": str(ram)
            })
        features_labels = [
            "{}_band_{}".format(pattern, band_num)
            for pattern, nb_bands in zip(self.data_type, user_features_bands)
            for band_num in range(nb_bands)
        ]
        return (user_feat_stack, app_dep), features_labels
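
The labels returned with the stack are pattern-major: every band of the first user-feature pattern, then every band of the next one. A minimal standalone sketch of that labelling logic, using made-up pattern names and band counts:

data_type = ["NDVI", "DEM"]        # made-up user-feature patterns
user_features_bands = [1, 3]       # made-up band counts per raster

# same comprehension as in get_features, outside of the class
features_labels = [
    "{}_band_{}".format(pattern, band_num)
    for pattern, nb_bands in zip(data_type, user_features_bands)
    for band_num in range(nb_bands)
]
print(features_labels)
# ['NDVI_band_0', 'DEM_band_0', 'DEM_band_1', 'DEM_band_2']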
Code example #2
    def get_time_series_masks(self, ram=128):
        """
        get time series masks
        """
        import os
        import glob
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.OtbAppBank import CreateBandMathApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG

        time_series_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(time_series_dir, raise_exe=False)
        times_series_mask = os.path.join(time_series_dir,
                                         self.time_series_masks_name)

        # needed to travel through iota2's library
        app_dep = []

        input_dates = [
            os.path.join(self.tile_directory, cdir)
            for cdir in os.listdir(self.tile_directory)
        ]
        input_dates = self.sort_dates_directories(input_dates)

        # get date's data
        date_data = []
        div_mask_patter = list(self.masks_rules.keys())[self.border_pos]
        cloud_mask_patter = list(self.masks_rules.keys())[self.cloud_pos]
        sat_mask_patter = list(self.masks_rules.keys())[self.sat_pos]
        if self.vhr_path.lower() != "none":
            div_mask_patter = div_mask_patter.replace(".TIF", "_COREG.TIF")
            cloud_mask_patter = cloud_mask_patter.replace(".TIF", "_COREG.TIF")
            sat_mask_patter = sat_mask_patter.replace(".TIF", "_COREG.TIF")

        for date_dir in input_dates:
            div_mask = glob.glob(
                os.path.join(date_dir,
                             f"{self.struct_path_masks}{div_mask_patter}"))[0]
            cloud_mask = glob.glob(
                os.path.join(
                    date_dir,
                    f"{self.struct_path_masks}{cloud_mask_patter}"))[0]
            sat_mask = glob.glob(
                os.path.join(date_dir,
                             f"{self.struct_path_masks}{sat_mask_patter}"))[0]
            # im1 = div, im2 = cloud, im3 = sat
            div_expr = "(1-(im1b1/2==rint(im1b1/2)))"
            cloud_expr = "im2b1"
            sat_expr = "im3b1"
            # expr = "*".join([div_expr, cloud_expr, sat_expr])
            expr = f"({div_expr} + {cloud_expr} + {sat_expr})==0?0:1"
            date_binary_mask = CreateBandMathApplication({
                "il": [div_mask, cloud_mask, sat_mask],
                "exp": expr
            })
            date_binary_mask.Execute()
            date_data.append(date_binary_mask)
            app_dep.append(date_binary_mask)
        dates_time_series_mask = CreateConcatenateImagesApplication({
            "il": date_data,
            "ram": str(ram),
            "out": times_series_mask
        })

        origin_proj = getRasterProjectionEPSG(sat_mask)
        if int(origin_proj) != int(self.target_proj):
            dates_time_series_mask.Execute()
            app_dep.append(dates_time_series_mask)
            self.generate_raster_ref(sat_mask)
            dates_time_series_mask, _ = CreateSuperimposeApplication({
                "inr": self.ref_image,
                "inm": dates_time_series_mask,
                "interpolator": "nn",
                "out": times_series_mask,
                "ram": str(ram)
            })

        return dates_time_series_mask, app_dep, len(date_data)
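
The per-date BandMath expression above collapses the three masks into one binary layer: the div term is 1 when the div value is odd (the rint comparison is a parity test), and the ternary returns 0 only when the div, cloud and saturation terms are all zero. A minimal pure-Python sketch of the same pixel-wise logic, with made-up sample values:

def date_mask_pixel(div, cloud, sat):
    """Pixel-wise equivalent of
    ((1-(im1b1/2==rint(im1b1/2))) + im2b1 + im3b1)==0 ? 0 : 1"""
    div_flag = 1 - int(div / 2 == round(div / 2))  # 1 when div is odd
    return 0 if (div_flag + cloud + sat) == 0 else 1

# made-up (div, cloud, sat) samples
print(date_mask_pixel(0, 0, 0))  # 0 : nothing flagged on that date
print(date_mask_pixel(1, 0, 0))  # 1 : odd div value flags the pixel
print(date_mask_pixel(2, 1, 0))  # 1 : cloudy pixel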
Code example #3
def validity(tile_name,
             config_path,
             output_path,
             maskOut_name,
             view_threshold,
             workingDirectory=None,
             RAM=128):
    """
    function dedicated to compute validity raster/vector by tile

    Parameters
    ----------
    tile_name [string]
        tile's name
    config_path [string]
        absolute path to the configuration file
    maskOut_name [string]
        output vector mask's name
    view_threshold [int]
        threshold
    working_directory [string]
        absolute path to a working directory
    RAM [int]
        pipeline's size (Mo)
    """
    import os
    import shutil
    from iota2.Common.ServiceConfigFile import iota2_parameters
    from iota2.Sensors.Sensors_container import sensors_container
    from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
    from iota2.Common.OtbAppBank import CreateBandMathApplication
    from iota2.Common.Utils import run
    from iota2.Common.FileUtils import erodeShapeFile
    from iota2.Common.FileUtils import removeShape
    from iota2.Common.FileUtils import ensure_dir

    features_dir = os.path.join(output_path, "features", tile_name)
    validity_name = "nbView.tif"

    validity_out = os.path.join(features_dir, validity_name)
    validity_processing = validity_out
    if workingDirectory:
        ensure_dir(os.path.join(workingDirectory, tile_name))
        validity_processing = os.path.join(workingDirectory, tile_name,
                                           validity_name)

    running_parameters = iota2_parameters(config_path)
    sensors_parameters = running_parameters.get_sensors_parameters(tile_name)
    remote_sensor_container = sensors_container(tile_name, workingDirectory,
                                                output_path,
                                                **sensors_parameters)

    sensors_time_series_masks = remote_sensor_container.get_sensors_time_series_masks(
        available_ram=RAM)
    sensors_masks_size = []
    sensors_masks = []
    for sensor_name, (time_series_masks, time_series_dep,
                      nb_bands) in sensors_time_series_masks:
        if sensor_name.lower() == "sentinel1":
            for _, time_series_masks_app in list(time_series_masks.items()):
                time_series_masks_app.Execute()
                sensors_masks.append(time_series_masks_app)
        else:
            time_series_masks.Execute()
            sensors_masks.append(time_series_masks)
        sensors_masks_size.append(nb_bands)

    total_dates = sum(sensors_masks_size)
    merge_masks = CreateConcatenateImagesApplication({
        "il": sensors_masks,
        "ram": str(RAM)
    })
    merge_masks.Execute()

    validity_app = CreateBandMathApplication({
        "il": merge_masks,
        "exp": "{}-({})".format(
            total_dates,
            "+".join(["im1b{}".format(i + 1) for i in range(total_dates)])),
        "ram": str(0.7 * RAM),
        "pixType": "uint8" if total_dates < 255 else "uint16",
        "out": validity_processing
    })
    if not os.path.exists(os.path.join(features_dir, validity_name)):
        validity_app.ExecuteAndWriteOutput()
        if workingDirectory:
            shutil.copy(validity_processing,
                        os.path.join(features_dir, validity_name))
    threshold_raster_out = os.path.join(features_dir,
                                        maskOut_name.replace(".shp", ".tif"))
    threshold_vector_out_tmp = os.path.join(
        features_dir, maskOut_name.replace(".shp", "_TMP.shp"))
    threshold_vector_out = os.path.join(features_dir, maskOut_name)

    input_threshold = validity_processing if os.path.exists(
        validity_processing) else validity_out

    threshold_raster = CreateBandMathApplication({
        "il": input_threshold,
        "exp": "im1b1>={}?1:0".format(view_threshold),
        "ram": str(0.7 * RAM),
        "pixType": "uint8",
        "out": threshold_raster_out
    })
    threshold_raster.ExecuteAndWriteOutput()
    cmd_poly = f"gdal_polygonize.py -mask {threshold_raster_out} {threshold_raster_out} -f \"ESRI Shapefile\" {threshold_vector_out_tmp} {os.path.splitext(os.path.basename(threshold_vector_out_tmp))[0]} cloud"
    run(cmd_poly)

    erodeShapeFile(threshold_vector_out_tmp, threshold_vector_out, 0.1)
    os.remove(threshold_raster_out)
    removeShape(threshold_vector_out_tmp.replace(".shp", ""),
                [".prj", ".shp", ".dbf", ".shx"])
Code example #4
    def get_time_series(self, ram=128):
        """
        TODO : be able of using a date interval
        Return
        ------
            list
                [(otb_Application, some otb's objects), time_series_labels]
                Functions dealing with otb's application instance has to
                returns every objects in the pipeline
        """
        import os
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateSuperimposeApplication
        from iota2.Common.FileUtils import ensure_dir
        from iota2.Common.FileUtils import getRasterProjectionEPSG
        from iota2.Common.FileUtils import FileSearch_AND

        # needed to travel through iota2's library
        app_dep = []

        input_dates = [
            os.path.join(self.tile_directory, cdir)
            for cdir in os.listdir(self.tile_directory)
        ]
        input_dates = self.sort_dates_directories(input_dates)

        # get date's data
        date_data = []
        for date_dir in input_dates:
            l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                         ".TIF")[0]
            if self.vhr_path.lower() != "none":
                l5_old_date = FileSearch_AND(date_dir, True, self.data_type,
                                             "COREG", ".TIF")[0]
            date_data.append(l5_old_date)

        time_series_dir = os.path.join(self.features_dir, "tmp")
        ensure_dir(time_series_dir, raise_exe=False)
        times_series_raster = os.path.join(time_series_dir,
                                           self.time_series_name)
        dates_time_series = CreateConcatenateImagesApplication({
            "il": date_data,
            "out": times_series_raster,
            "ram": str(ram)
        })
        _, dates_in = self.write_dates_file()

        # build labels
        features_labels = [
            f"{self.__class__.name}_{band_name}_{date}" for date in dates_in
            for band_name in self.stack_band_position
        ]

        # if not all bands must be used
        if self.extracted_bands:
            app_dep.append(dates_time_series)
            (dates_time_series,
             features_labels) = self.extract_bands_time_series(
                 dates_time_series, dates_in, len(self.stack_band_position),
                 self.extracted_bands, ram)
        origin_proj = getRasterProjectionEPSG(date_data[0])
        if int(origin_proj) != int(self.target_proj):
            dates_time_series.Execute()
            app_dep.append(dates_time_series)
            self.generate_raster_ref(date_data[0])
            dates_time_series, _ = CreateSuperimposeApplication({
                "inr": self.ref_image,
                "inm": dates_time_series,
                "out": times_series_raster,
                "ram": str(ram)
            })
        return (dates_time_series, app_dep), features_labels
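
The time series labels built above are date-major: for each acquisition date, one label per stacked band. A minimal standalone sketch with made-up values standing in for self.__class__.name, dates_in and self.stack_band_position:

sensor_name = "Landsat5Old"             # made-up sensor name
dates_in = ["19990503", "19990519"]     # made-up dates
stack_band_position = ["B1", "B2"]      # made-up band names

features_labels = [
    f"{sensor_name}_{band_name}_{date}" for date in dates_in
    for band_name in stack_band_position
]
print(features_labels)
# ['Landsat5Old_B1_19990503', 'Landsat5Old_B2_19990503',
#  'Landsat5Old_B1_19990519', 'Landsat5Old_B2_19990519']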
Code example #5
File: Sentinel_1.py  Project: inglada/iota2
    def get_time_series_gapFilling(self, ram=128):
        """
        Due to the SAR data, time series must be split by polarisation
        and orbit (ascending / descending)
        """
        import os
        import configparser
        from collections import OrderedDict

        from iota2.Common.FileUtils import getNbDateInTile
        from iota2.Common.OtbAppBank import getSARstack
        from iota2.Common.OtbAppBank import CreateConcatenateImagesApplication
        from iota2.Common.OtbAppBank import CreateImageTimeSeriesGapFillingApplication
        from iota2.Common.OtbAppBank import getInputParameterOutput

        (all_filtered, all_masks, interp_date_files,
         input_date_files) = getSARstack(self.s1_cfg,
                                         self.tile_name,
                                         self.all_tiles.split(" "),
                                         os.path.join(self.i2_output_path,
                                                      "features"),
                                         workingDirectory=None)
        # to be clearer
        s1_data = OrderedDict()
        s1_labels = OrderedDict()

        config = configparser.ConfigParser()
        config.read(self.s1_cfg)

        interpolation_method = "linear"
        if config.has_option("Processing", "gapFilling_interpolation"):
            interpolation_method = config.get("Processing",
                                              "gapFilling_interpolation")
        dependancies = []

        for filtered, masks, interp_dates, in_dates in zip(
                all_filtered, all_masks, interp_date_files, input_date_files):
            sar_mode = os.path.basename(
                filtered.GetParameterValue("outputstack"))
            sar_mode = "_".join(os.path.splitext(sar_mode)[0].split("_")[0:-1])
            polarisation = sar_mode.split("_")[1]
            orbit = sar_mode.split("_")[2]

            gapfilling_orbit_pol_name_masks = f"{self.gapfilling_orbit_pol_name_mask}_{orbit}_{polarisation}.tif"
            gapfilling_raster_mask = os.path.join(
                self.features_dir, "tmp", gapfilling_orbit_pol_name_masks)

            masks_stack = CreateConcatenateImagesApplication({
                "il": masks,
                "out": gapfilling_raster_mask,
                "ram": str(ram)
            })

            if self.write_outputs_flag is False:
                filtered.Execute()
                masks_stack.Execute()
            else:
                filtered_raster = filtered.GetParameterValue(
                    getInputParameterOutput(filtered))
                masks_stack_raster = masks_stack.GetParameterValue(
                    getInputParameterOutput(masks_stack))
                if not os.path.exists(masks_stack_raster):
                    masks_stack.ExecuteAndWriteOutput()
                if not os.path.exists(filtered_raster):
                    filtered.ExecuteAndWriteOutput()
                if os.path.exists(masks_stack_raster):
                    masks_stack = masks_stack_raster
                if os.path.exists(filtered_raster):
                    filtered = filtered_raster
            dependancies.append((filtered, masks_stack))
            gapfilling_orbit_pol_name = f"{self.gapfilling_orbit_pol_name}_{orbit}_{polarisation}.tif"
            gapfilling_raster = os.path.join(self.features_dir, "tmp",
                                             gapfilling_orbit_pol_name)

            gap_app = CreateImageTimeSeriesGapFillingApplication({
                "in": filtered,
                "mask": masks_stack,
                "it": interpolation_method,
                "id": in_dates,
                "od": interp_dates,
                "comp": str(1),
                "out": gapfilling_raster
            })
            s1_data[sar_mode] = gap_app

            sar_dates = sorted(getNbDateInTile(interp_dates,
                                               display=False,
                                               raw_dates=True),
                               key=lambda x: int(x))
            labels = [
                "{}_{}_{}_{}".format(self.__class__.name, orbit, polarisation,
                                     date).lower() for date in sar_dates
            ]
            s1_labels[sar_mode] = labels
        return (s1_data, dependancies), s1_labels
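
The polarisation and orbit are recovered from the filtered stack's output file name: the extension and the last underscore-separated token are dropped, then the second and third remaining tokens give the polarisation and the orbit. A small standalone sketch with a made-up file name that follows that convention:

import os

output_stack = "/tmp/S1_vv_DES_filtered.tif"  # made-up file name

sar_mode = os.path.basename(output_stack)
sar_mode = "_".join(os.path.splitext(sar_mode)[0].split("_")[0:-1])
polarisation = sar_mode.split("_")[1]
orbit = sar_mode.split("_")[2]
print(sar_mode, polarisation, orbit)  # S1_vv_DES vv DES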