Exemplo n.º 1
0
def get_concave_hull_from_basic_triangulation(in_v_long, in_v_lat, in_range, in_nb_pix_range):
    """
    Compute the concave hull of a set of points using classical Delaunay triangulation.

    :param in_v_long: longitudes of points for which to compute the hull
    :type in_v_long: 1D-array of float
    :param in_v_lat: latitudes of points for which to compute the hull
    :type in_v_lat: 1D-array of float
    :param in_range: range of points for which to compute the hull
    :type in_range: 1D-array of int
    :param in_nb_pix_range: maximal number of pixel in range
    :type in_nb_pix_range: int

    :return: the hull of the input set of points
    :rtype: OGRMultiPolygon
    """
    # Get instance of service config file
    cfg = service_config_file.get_instance()

    logger = logging.getLogger("my_hull")

    nb_pix = in_v_long.size
    # ============================ HULL COMPUTATION FOR BIG LAKES ===========================
    # ============================  TO CHANGE AS SOON AS POSSIBLE ===========================
    # If the lake contains more than NB_PIX_MAX_DELAUNEY pixels, the point set is subsampled
    # to that size. Pixels are randomly selected WITHOUT replacement so that exactly
    # nb_pix_max_delauney DISTINCT pixels are kept (np.random.randint could pick duplicates,
    # silently reducing the effective number of points).
    nb_pix_max_delauney = cfg.getint("CONFIG_PARAMS", "NB_PIX_MAX_DELAUNEY")
    if nb_pix > nb_pix_max_delauney:
        id_selected = np.random.choice(nb_pix, size=int(nb_pix_max_delauney), replace=False)
        logger.warning(
            "The number of pixel of the lake is reduced to %d" % (nb_pix_max_delauney))
        in_v_long = in_v_long[id_selected]
        in_v_lat = in_v_lat[id_selected]
        in_range = in_range[id_selected]
        # Keep the reduction ratio to rescale the alpha parameter accordingly
        alpha_ratio = nb_pix_max_delauney/nb_pix
        nb_pix = in_v_long.size
        logger.debug("alpha_ratio : " + str(alpha_ratio))
    else:
        alpha_ratio = 1.

    # =======================================================================================

    # 1.1 - Gather coordinates in a 2D-array
    coords = np.zeros((nb_pix, 2))

    # 1.2 - Transform geographical coordinates into utm coordinates
    coords[:, 0], coords[:, 1], utm_epsg_code = my_tools.get_utm_coords_from_lonlat(in_v_long, in_v_lat)

    # 1.3 - Compute alpha shape
    hull_method = cfg.getfloat("CONFIG_PARAMS", "HULL_METHOD")
    if hull_method == 1.1:  # Without alpha parameter
        concave_hull_utm = get_concav_hull_bis(coords)
    else:  # With alpha parameter
        alpha = alpha_ratio*( 0.03 + 0.01 * in_range / in_nb_pix_range)  # alpha parameter ranges from 0.03 to 0.04 following the range index
        concave_hull_utm = alpha_shape(coords, alpha)

    # 1.4 - Transform concave hull polygon into geographical coordinates
    concave_hull = my_tools.get_lon_lat_polygon_from_utm(concave_hull_utm, utm_epsg_code)

    # 1.5 - Convert Shapely geometry to OGRPolygon or OGRMultiPolygon
    return ogr.CreateGeometryFromWkb(concave_hull.wkb)
Exemplo n.º 2
0
    def __init__(self, in_lake_db_filename, in_poly=None):
        """
        Constructor: open the prior lake database shapefile.

        :param in_lake_db_filename: full path of the prior lake database
        :type in_lake_db_filename: string
        :param in_poly: polygon to spatially select lakes from DB
        :type in_poly: ogr.Polygon

        Variables of the object:
            - lakedb_id / String: Fieldname of lake id in lakedb
            - lake_layer / osgeo.ogr.Layer: lake_layer of a priori lake database
            - lake_ds / osgeo.ogr.DataSource: datasource of a priori lake database
        """
        logger = logging.getLogger(self.__class__.__name__)
        logger.info("Lake DB = %s", in_lake_db_filename)

        # Initialize the parent LakeDb class
        super().__init__()

        # Read the lake identifier fieldname from the service configuration file
        cfg = service_config_file.get_instance()
        self.lake_db_id = cfg.get("DATABASES", "LAKE_DB_ID")

        # Open the lake database, optionally spatially restricted to in_poly
        self.lake_ds, self.lake_layer = self.open_shp(in_lake_db_filename, in_poly)
Exemplo n.º 3
0
    def __init__(self, in_product_type, in_obj_pixc, in_obj_pixc_vec,
                 in_obj_lake_db, in_layer_name):
        """
        Constructor

        :param in_product_type: type of product among "SP"=LakeSP and "TILE"=LakeTile
        :type in_product_type: string
        :param in_obj_pixc: pixel cloud from which to compute lake products
        :type in_obj_pixc: proc_pixc.PixelCloud or proc_pixc_sp.PixelCloudSP object
        :param in_obj_pixc_vec: pixel cloud complementary file from which to compute lake products
        :type in_obj_pixc_vec: proc_pixc_vec.PixelCloudVec or proc_pixc_vec_sp.PixelCloudVecSP object
        :param in_obj_lake_db: lake database
        :type in_obj_lake_db: lake_db.lakeDb_shp or lake_db.lakeDb_sqlite
        :param in_layer_name: name for lake product layer
        :type in_layer_name: string

        Variables of the object:
            - obj_pixc / proc_pixc.PixelCloud or proc_pixc_sp.PixelCloud: pixel cloud from which to compute lake products
            - obj_pixc_vec / proc_pixc_vec.PixelCloudVec or proc_pixc_vec_sp.PixelCloudVec: extra info for pixel cloud
            - obj_lake_db / lake_db.lakeDb_shp or lake_db.lakeDb_sqlite: lake database
            - shp_mem_layer / LakeTileShp_product: shapefile memory layer of the lake product
            - uniq_prior_id / set: list of uniq prior identifiers linked to observed objects
        """
        # Get instance of service config file
        self.cfg = service_config_file.get_instance()
        logger = logging.getLogger(self.__class__.__name__)
        logger.info("- start -")

        # Validate and store the product type: only "TILE" and "SP" are accepted
        if in_product_type not in ("TILE", "SP"):
            message = "ERROR = product type is %s ; should be SP or TILE" % in_product_type
            raise service_error.ProcessingError(message, logger)
        self.type = in_product_type

        # Keep references to the input pixel cloud, its complementary file and the lake DB
        self.obj_pixc = in_obj_pixc
        self.obj_pixc_vec = in_obj_pixc_vec
        self.obj_lake_db = in_obj_lake_db

        # Shapefile memory layer of the lake product
        self.shp_mem_layer = shp_file.LakeSPShpProduct(self.type, in_layer_name)

        # Set of unique prior identifiers linked to observed objects
        self.uniq_prior_id = set()

        # For LakeTile products, attach the continent of the tile polygon to the metadata
        if self.type == "TILE":
            continent = self.obj_lake_db.link_poly_to_continent(self.obj_pixc.tile_poly)
            self.obj_pixc.pixc_metadata["continent"] = continent
            self.obj_pixc_vec.pixcvec_metadata["continent"] = continent
Exemplo n.º 4
0
def compute_lake_boundaries(in_v_long, in_v_lat, in_range, in_azimuth, in_nb_pix_range):
    """
    Compute the hull of a set of points determined by their coordinates given in input parameters

    :param in_v_long: longitudes of points for which to compute the hull
    :type in_v_long: 1D-array of float
    :param in_v_lat: latitudes of points for which to compute the hull
    :type in_v_lat: 1D-array of float
    :param in_range: range of points for which to compute the hull
    :type in_range: 1D-array of int
    :param in_azimuth: azimuth of points for which to compute the hull
    :type in_azimuth: 1D-array of int
    :param in_nb_pix_range: maximal number of pixel in range
    :type in_nb_pix_range: int

    :return the hull of the input set of points
    :rtype: OGRMultiPolygon
    """
    # Get instance of service config file
    cfg = service_config_file.get_instance()
    logger = logging.getLogger("my_hull")
    logger.debug("Computing lake boundaries")

    # Dispatch on the configured hull method; fewer than 4 points always
    # falls back to the convex hull (triangulation needs at least 4 points)
    hull_method = cfg.getfloat("CONFIG_PARAMS", "HULL_METHOD")

    if len(in_v_long) < 4:
        logger.debug("Hull computation method : Convex hull because less than 4 pixels")
        hull_geom = get_convex_hull(in_v_long, in_v_lat)
    elif hull_method == 0:
        # 0 - CONVEX HULL
        logger.debug("Hull computation method : Convex hull")
        hull_geom = get_convex_hull(in_v_long, in_v_lat)
    elif hull_method == 1.0:
        # 1.0 - CONCAV HULL - Delaunay triangulation with CGAL
        logger.debug("Hull computation method : Concav hull computed in ground geometry, based on Delaunay triangulation - using CGAL library")
        hull_geom = get_concave_hull_from_cgal_triangulation(in_v_long, in_v_lat, in_range, in_azimuth, in_nb_pix_range)
    elif hull_method == 1.1:
        # 1.1 - CONCAV HULL - Delaunay triangulation
        logger.debug("Hull computation method : Delauney triangulation")
        hull_geom = get_concave_hull_from_basic_triangulation(in_v_long, in_v_lat, in_range, in_nb_pix_range)
    elif hull_method == 2:
        # 2 - CONCAV HULL - Radar vectorisation method
        logger.debug("Hull computation method : radar vectorization (default)")
        hull_geom = get_concave_hull_from_radar_vectorisation(in_range, in_azimuth, in_v_long, in_v_lat)
    else:
        message = "Concave hull computation method not understood"
        raise service_error.ProcessingError(message, logger)

    # Guarantee a (multi)polygon result: replace anything else by an empty polygon
    if hull_geom.GetGeometryName() not in ("POLYGON", "MULTIPOLYGON"):
        hull_geom = ogr.Geometry(ogr.wkbPolygon)
    return hull_geom
    def __init__(self):
        """
        Init class ServiceLogger.

        Configure the root logger from the service configuration file:
        a file handler (buffered through a MemoryHandler) and, optionally,
        a console handler. Handlers are only created on the first call
        (guarded by the 'first' attribute) so repeated instantiation does
        not duplicate log output.
        """
        # NOTE(review): the original line "THIS.klass = self" referenced an
        # undefined name THIS and raised NameError on every call; removed.
        cfg = service_config_file.get_instance()
        # logging format
        # LEVEL : DEBUG, INFO, WARNING, ERROR
        # log format :
        # YYYY-MM-DDThh:mm:ss.mmm     LEVEL:ClassName:FunctionName: message
        self.log_formatter = logging.Formatter(
            fmt=
            '%(asctime)s.%(msecs)03d     %(levelname)s | %(name)s::%(funcName)s | %(message)s',
            datefmt='%Y-%m-%dT%H:%M:%S')

        # set the name of the class in log messages
        self.root_logger = logging.getLogger()
        # set the logging level from the configuration file
        self.root_logger.setLevel(cfg.get('LOGGING', 'logFileLevel'))
        if not hasattr(self, 'first'):
            # First call to ServiceLogger
            self.first = True
            # create a log file
            self.file_handler = logging.FileHandler(cfg.get(
                'LOGGING', 'logFile'),
                                                    mode='w')
            self.file_handler.setFormatter(self.log_formatter)
            self.file_handler.setLevel(cfg.get('LOGGING', 'logFileLevel'))
            # create a memory Handler to bufferize SAS log message
            self.memory_handler = logging.handlers.MemoryHandler(
                1000, target=self.file_handler)
            # add logger
            self.root_logger.addHandler(self.memory_handler)

            if cfg.get('LOGGING', 'logConsole') == 'True':
                # logging in console
                self.console_handler = logging.StreamHandler()
                self.console_handler.setFormatter(self.log_formatter)
                self.console_handler.setLevel(
                    cfg.get('LOGGING', 'logConsoleLevel'))
                self.root_logger.addHandler(self.console_handler)
            else:
                self.console_handler = None
def compute_pixcvec_filename(in_laketile_pixcvec_filename, in_output_dir):
    """
    Compute L2_HR_PIXCVec filename from L2_HR_LakeTile_pixcvec filename

    :param in_laketile_pixcvec_filename: L2_HR_LakeTile_pixcvec filename to convert
    :type in_laketile_pixcvec_filename: string
    :param in_output_dir: output directory
    :type in_output_dir: string

    :return: full path of the PIXCVec file, with its product counter incremented
             until the filename does not already exist in in_output_dir
    :rtype: string
    """
    # Get config file
    cfg = service_config_file.get_instance()

    # Build the printf-style PIXCVec filename pattern from the module constants
    pixcvec_pattern = PIXCVEC_PATTERN.replace(
        "PIXCVEC_PREFIX + ",
        PIXCVEC_PREFIX).replace('"', '').replace(" + PIXCVEC_SUFFIX",
                                                 PIXCVEC_SUFFIX)
    lake_sp_crid = cfg.get("CRID", "LAKE_SP_CRID")

    # Get infos from input LakeTile_pixcvec filename
    tmp_dict = get_info_from_filename(in_laketile_pixcvec_filename, "LakeTile")

    # Compute associated PIXCVec filename, incrementing the product counter
    # until the output filename is free (single loop instead of the previous
    # duplicated pre-loop/in-loop computation)
    product_counter = 1
    while True:
        filename = pixcvec_pattern % (int(tmp_dict["cycle"]), int(tmp_dict["pass"]),
                                      tmp_dict["tile_ref"], tmp_dict["start_date"],
                                      tmp_dict["stop_date"], lake_sp_crid, product_counter)
        out_filename = os.path.join(in_output_dir, filename)
        if not os.path.exists(out_filename):
            break
        product_counter += 1

    return out_filename
Exemplo n.º 7
0
    def open_db(self,
                in_lake_db_filename,
                table_name,
                field_name,
                in_poly=None):
        """
        Open database, optionnally spatially select polygons and copy layer to memory

        :param in_lake_db_filename: full path of the SQLite/spatialite lake database
        :type in_lake_db_filename: String
        :param table_name: table_name to load from file
        :type table_name: String
        :param field_name: field_name to load from table
        :type field_name: String
        :param in_poly: polygon to spatially select lakes from DB
        :type in_poly: ogr.Polygon

        :return: in-memory OGR datasource and its layer
        :rtype: (osgeo.ogr.DataSource, osgeo.ogr.Layer)
        """
        logger = logging.getLogger(self.__class__.__name__)
        cfg = service_config_file.get_instance()

        # Transform 3D geometry into 2D geometry (necessary for spatialite query)
        # BUGFIX: only done when a polygon is given — in_poly defaults to None
        # and calling FlattenTo2D() on None raised AttributeError
        if in_poly is not None:
            in_poly.FlattenTo2D()

        # Open the SQLite database and define the connector
        self.db_conn = sqlite3.connect(in_lake_db_filename, timeout=10)

        # Load spatialite extension
        self.db_conn.enable_load_extension(True)
        self.db_conn.execute('SELECT load_extension("mod_spatialite")')

        # Define the cursor
        self.db_cur = self.db_conn.cursor()

        if cfg.get('LOGGING', 'logFileLevel') == 'DEBUG':
            # BUGFIX: count the rows of the requested table instead of the
            # hardcoded 'lake' table, so the count matches the logged table_name
            (lakes_nb,
             ) = self.db_cur.execute('SELECT count(*) from %s' % table_name).fetchone()
            logger.debug(" %d features stored in table %s of database" %
                         (lakes_nb, table_name))

        # Create an output datasource in memory
        mem_driver = ogr.GetDriverByName('MEMORY')  # Memory driver

        # Open the memory datasource with write access
        ds = mem_driver.CreateDataSource('memData')

        # Set spatial projection
        srs = ogr.osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        # Creating memory layer
        lyr = ds.CreateLayer(str('layer'), srs=srs, geom_type=ogr.wkbPolygon)
        lyr.CreateField(ogr.FieldDefn(field_name, ogr.OFTString))

        # Define the layer
        lyr_defn = lyr.GetLayerDefn()

        # NOTE(review): table_name/field_name come from internal callers; the
        # SQL below is built by string formatting and must never receive
        # untrusted input
        if in_poly is not None:
            cmd = "SELECT %s, AsText(geometry) FROM %s WHERE MBRIntersects(GeomFromText('%s'), %s.geometry);" % (
                field_name, table_name, in_poly.ExportToWkt(), table_name)
            self.db_cur.execute(cmd)
        else:
            cmd = "SELECT %s, AsText(geometry) FROM %s ;" % (field_name,
                                                             table_name)
            self.db_cur.execute(cmd)

        for row in self.db_cur:
            # Create empty feature/entity
            out_feat = ogr.Feature(lyr_defn)

            # Fill feature with ID and geometry from SQLite request
            out_feat.SetField(field_name, str(row[0]))
            multi_poly = ogr.CreateGeometryFromWkt(row[1])
            out_feat.SetGeometry(multi_poly)

            lyr.CreateFeature(out_feat)

            out_feat.Destroy()

        lyr.ResetReading()
        # Get memory lake_layer
        logger.info("%d features after focus over studied area" %
                    lyr.GetFeatureCount())

        # Close spatialite database
        self.db_conn.close()

        return ds, lyr
def tmpGetConfigFromServiceConfigFile():
    """
    Set global variables from serviceConfigFile.
    This function is temporary. It will be deleted in the future
    when serviceConfigFile will be used by lake_tile.
    """
    IN_config = service_config_file.get_instance()
    logger = logging.getLogger("locnes_variables")

    # Data-driven equivalent of the previous repetitive global assignments:
    # (global name, config section, config option, config getter method)
    config_spec = [
        # Lake database
        ("LAKE_DB", "DATABASES", "LAKE_DB", "get"),
        # Lake identifier attribute name in the database
        ("LAKE_DB_ID", "DATABASES", "LAKE_DB_ID", "get"),
        # Shapefile with polygons of continents
        ("CONTINENT_FILE", "DATABASES", "CONTINENT_FILE", "get"),
        # Water flags
        ("FLAG_WATER", "CONFIG_PARAMS", "FLAG_WATER", "get"),
        # Dark water flags
        ("FLAG_DARK", "CONFIG_PARAMS", "FLAG_DARK", "get"),
        # Hull method
        ("HULL_METHOD", "CONFIG_PARAMS", "HULL_METHOD", "getfloat"),
        # Maximal standard deviation of height inside a lake
        ("STD_HEIGHT_MAX", "CONFIG_PARAMS", "STD_HEIGHT_MAX", "getfloat"),
        # Model to deal with big lake processing
        ("BIGLAKE_MODEL", "CONFIG_PARAMS", "BIGLAKE_MODEL", "get"),
        # Min size for lake to be considered as big
        ("BIGLAKE_MIN_SIZE", "CONFIG_PARAMS", "BIGLAKE_MIN_SIZE", "getfloat"),
        # Grid spacing for lake height smoothing
        ("BIGLAKE_GRID_SPACING", "CONFIG_PARAMS", "BIGLAKE_GRID_SPACING", "getint"),
        # Grid resolution for lake height smoothing
        ("BIGLAKE_GRID_RES", "CONFIG_PARAMS", "BIGLAKE_GRID_RES", "getint"),
    ]

    for var_name, section, option, getter in config_spec:
        value = getattr(IN_config, getter)(section, option)
        # Same effect as 'global var_name; var_name = value' at module level
        globals()[var_name] = value
        logger.info("> %s = %s" % (var_name, value))
    def __init__(self, in_lake_tile_file_list, in_continent, in_out_dir):
        """
        Constructor of LakeSP filenames

        :param in_lake_tile_file_list: list of LakeTile_shp files full path
        :type in_lake_tile_file_list: list of string
        :param in_continent: continent concerning the LakeSP
        :type in_continent: string
        :param in_out_dir: output directory
        :type in_out_dir: string
        """
        logger = logging.getLogger(self.__class__.__name__)
        logger.info("- start -")
        # Get config file
        cfg = service_config_file.get_instance()

        # 1 - Init variables
        self.lake_tile_file_list = in_lake_tile_file_list  # list of LakeTile_shp files full path
        self.out_dir = in_out_dir  # Output directory
        self.product_counter = 1  # Product counter
        # get parameters
        self.lake_sp_pattern = LAKE_SP_PATTERN
        self.lake_sp_prefix = LAKE_SP_PREFIX
        self.lake_sp_pattern = self.lake_sp_pattern.replace(
            "LAKE_SP_PREFIX + ", self.lake_sp_prefix).replace('"', '')
        self.lake_sp_pattern_no_cont = LAKE_SP_PATTERN_NO_CONT.replace(
            "LAKE_SP_PREFIX + ", self.lake_sp_prefix).replace('"', '')

        self.lake_sp_crid = cfg.get("CRID", "LAKE_SP_CRID")
        # Lake Id prefix
        self.lake_id_prefix = ""

        # 2 - Retrieve info from LakeTile filename
        tmp_dict = get_info_from_filename(self.lake_tile_file_list[0],
                                          "LakeTile")
        # 2.1 - Cycle number (default 999 when missing from the filename)
        cycle_num = tmp_dict["cycle"]
        if cycle_num is None:
            self.cycle_num = 999
            logger.info(
                "WARNING: cycle number has not been found in LakeTile filename %s -> set to default value = %03d",
                os.path.basename(self.lake_tile_file_list[0]), self.cycle_num)
        else:
            self.cycle_num = int(cycle_num)
            logger.info("Cycle number = %03d", self.cycle_num)
        # 2.2 - Pass number (default 999 when missing from the filename)
        # BUGFIX: int() was previously applied before the None check, which made
        # the check dead code (int(None) raises TypeError); defer the conversion
        # as done for cycle_num above
        pass_num = tmp_dict["pass"]
        if pass_num is None:
            self.pass_num = 999
            logger.info(
                "WARNING: pass number has not been found in LakeTile filename %s -> set to default value = %03d",
                os.path.basename(self.lake_tile_file_list[0]), self.pass_num)
        else:
            self.pass_num = int(pass_num)
            logger.info("Pass number = %03d", self.pass_num)
        # 2.3 - Continent
        if in_continent == "":
            self.continent = ""
            logger.info("WARNING: no continental split")
        else:
            self.continent = in_continent
            logger.info("Continent = %s", self.continent)

        # 3 - Retrieve start and stop dates from LakeTile_shp filenames
        self.compute_start_stop_dates()

        # 4 - Compute output filenames
        self.compute_lake_sp_filename()  # LakeSP filename
    def __init__(self, in_pixc_file, in_pixc_vec_river_file, in_out_dir):
        """
        Constructor of LakeTile filenames

        :param in_pixc_file: PIXC file full path
        :type in_pixc_file: string
        :param in_pixc_vec_river_file: PIXCVecRiver file full path
        :type in_pixc_vec_river_file: string
        :param in_out_dir: output directory
        :type in_out_dir: string
        """
        logger = logging.getLogger(self.__class__.__name__)
        logger.info("- start -")
        # Get config file
        cfg = service_config_file.get_instance()

        # 1 - Init variables
        self.pixc_file = in_pixc_file  # PIXC file full path
        self.pixc_vec_river_file = in_pixc_vec_river_file  # PIXCVecRiver full path
        self.out_dir = in_out_dir  # Output directory
        self.product_counter = 1  # Product counter
        # get parameters
        self.lake_tile_pattern = LAKE_TILE_PATTERN
        self.lake_tile_prefix = LAKE_TILE_PREFIX
        self.lake_tile_pattern = self.lake_tile_pattern.replace(
            "LAKE_TILE_PREFIX + ", self.lake_tile_prefix).replace('"', '')
        self.lake_tile_crid = cfg.get("CRID", "LAKE_TILE_CRID")
        self.lake_tile_shp_suffix = LAKE_TILE_SHP_SUFFIX
        self.lake_tile_edge_suffix = LAKE_TILE_EDGE_SUFFIX
        self.lake_tile_pixcvec_suffix = LAKE_TILE_PIXCVEC_SUFFIX

        # 2 - Retrieve info from PIXC filename
        tmp_dict = get_info_from_filename(self.pixc_file, "PIXC")
        # 2.1 - Cycle number (default 999 when missing from the filename)
        cycle_num = tmp_dict["cycle"]
        if cycle_num is None:
            self.cycle_num = 999
            logger.info(
                "WARNING: cycle number has not been found in PIXC filename %s -> set to default value = %03d",
                os.path.basename(self.pixc_file), self.cycle_num)
        else:
            self.cycle_num = int(cycle_num)
            logger.info("Cycle number = %03d", self.cycle_num)
        # 2.2 - Pass number (default 999 when missing from the filename)
        # BUGFIX: int() was previously applied before the None check, which made
        # the check dead code (int(None) raises TypeError); defer the conversion
        # as done for cycle_num above
        pass_num = tmp_dict["pass"]
        if pass_num is None:
            self.pass_num = 999
            logger.info(
                "WARNING: pass number has not been found in PIXC filename %s -> set to default value = %03d",
                os.path.basename(self.pixc_file), self.pass_num)
        else:
            self.pass_num = int(pass_num)
            logger.info("Pass number = %03d", self.pass_num)
        # 2.3 - Tile ref (default "ttts" when missing)
        self.tile_ref = tmp_dict["tile_ref"]
        if self.tile_ref is None:
            self.tile_ref = "ttts"
            logger.info(
                "WARNING: tile ref has not been found in PIXC filename %s -> set to default value = %s",
                os.path.basename(self.pixc_file), self.tile_ref)
        else:
            logger.info("Tile ref = %s", self.tile_ref)
        # 2.4 - Start date (default placeholder when missing)
        self.start_date = tmp_dict["start_date"]
        if self.start_date is None:
            self.start_date = "yyyyMMddThhmmss"
            logger.info(
                "WARNING: start date has not been found in PIXC filename %s -> set to default value = %s",
                os.path.basename(self.pixc_file), self.start_date)
        else:
            logger.info("Start date = %s", self.start_date)
        # 2.5 - Stop date (default placeholder when missing)
        self.stop_date = tmp_dict["stop_date"]
        if self.stop_date is None:
            self.stop_date = "yyyyMMddThhmmss"
            logger.info(
                "WARNING: stop date has not been found in PIXC filename %s -> set to default value = %s",
                os.path.basename(self.pixc_file), self.stop_date)
        else:
            logger.info("Stop date = %s", self.stop_date)

        # 3 - Test compatibility of PIXCVecRiver filename with PIXC filename
        tmp_ok = self.test_pixc_vec_river_filename()
        if tmp_ok:
            logger.info(
                "PIXCVecRiver basename %s is compatible with PIXC basename %s",
                os.path.basename(self.pixc_vec_river_file),
                os.path.basename(self.pixc_file))
        else:
            logger.info(
                "WARNING: PIXCVecRiver basename %s IS NOT compatible with PIXC basename %s (cf. above)",
                os.path.basename(self.pixc_vec_river_file),
                os.path.basename(self.pixc_file))

        # 4 - Compute output filenames
        self.compute_lake_tile_filename_shp()  # LakeTile_shp filename
        self.compute_lake_tile_filename_edge()  # LakeTile_edge filename
        self.compute_lake_tile_filename_pixcvec()  # LakeTile_pixcvec filename
Exemplo n.º 11
0
    def __init__(self, in_lake_tile_pixcvec_file_list, in_obj_pixc_edge_sp,
                 in_lake_sp_dir, in_continent):
        """
        This class is designed to process L2_HR_PIXCVec product over a swath.
        All pixels involved in entities covering more than one tile are processed here.
        The geolocation of pixels is improved for those pixels and PIXCVec NetCDF file is updated.

        The initialization of a PixCVecSP consists in:
         - set class attributes
         - copy input LakeTile_pixcvec files into PIXCVec files

        NP: this object is related to one swath

        :param in_lake_tile_pixcvec_file_list: list of LakeTile_pixcvec files full path concerning current swath
        :type in_lake_tile_pixcvec_file_list: list of string
        :param in_obj_pixc_edge_sp: list of subset of PixC for edge objects current swath
        :type in_obj_pixc_edge_sp: proc_pixc_sp.PixCEdgeSP
        :param in_lake_sp_dir: output LakeSP directory
        :type in_lake_sp_dir: string
        :param in_continent: continent covered by the tile (if global var CONTINENT_FILE exists)
        :type in_continent: string

        Variables of the object:

            - From LakeTile_pixcvec file
                - longitude_vectorproc / 1D-array of float: improved longitude of water pixels
                - latitude_vectorproc / 1D-array of float: improved latitude of water pixels
                - height_vectorproc / 1D-array of float: improved height of water pixels
                - river_lake_other_tag / 1D-array of float: tag associated to river and lake databases

            - From process:
                - lake_tile_pixcvec_file_list / list of str: list of input LakeTile_pixcvec files
                - pixc_vec_file_list / list of str: list of output PIXCVec files
                - obj_pixc_edge_sp / proc_pixc_sp.PixCEdgeSP object: subset of PixC related to pixels of objects at top/bottom edge of a PixC tile (output of PGE_LakeTile)
                - nb_water_pix / int: number of pixels to process
        """
        # 1 - Init variables
        # Get instance of service config file
        self.cfg = service_config_file.get_instance()

        # LakeTile_pixcvec files of the current swath, sorted in place
        in_lake_tile_pixcvec_file_list.sort()
        self.lake_tile_pixcvec_file_list = in_lake_tile_pixcvec_file_list
        self.pixc_vec_file_list = []  # Output PIXCVec files
        self.obj_pixc_edge_sp = in_obj_pixc_edge_sp  # PixC_SP objects of current swath
        self.continent = in_continent  # Continent processed

        nb_pixels = self.obj_pixc_edge_sp.nb_pixels

        # Improved geolocation variables, initialized to 0
        self.longitude_vectorproc = np.zeros(nb_pixels)
        self.latitude_vectorproc = np.zeros(nb_pixels)
        self.height_vectorproc = np.zeros(nb_pixels)

        # Identifier arrays, initialized to empty strings
        self.node_id = np.empty(nb_pixels, dtype=object)
        self.node_id[:] = ""
        self.lakedb_id = np.empty(nb_pixels, dtype=object)
        self.lakedb_id[:] = ""
        self.lakeobs_id = np.empty(nb_pixels, dtype=object)
        self.lakeobs_id[:] = ""

        # Ice flags, initialized to 0
        self.flag_ice_climato = np.zeros(nb_pixels, dtype=np.uint8)
        self.flag_ice_dyn = np.zeros(nb_pixels, dtype=np.uint8)

        # Tile numbers processed in this class
        self.tile_number_list = []

        # 2 - List of output files computation
        for tile_pixcvec_file in self.lake_tile_pixcvec_file_list:
            # 2.1 - Compute output PIXCVec file full path
            pixc_vec_file = my_names.compute_pixcvec_filename(tile_pixcvec_file,
                                                              in_lake_sp_dir)

            # 2.2 - Remove if exists
            if os.path.exists(pixc_vec_file):
                os.remove(pixc_vec_file)

            # 2.3 - Store the PIXCVec file in the output list
            self.pixc_vec_file_list.append(pixc_vec_file)

            # 2.4 - Extract tile number from the LakeTile_pixcvec filename
            tile_ref = my_names.get_info_from_filename(tile_pixcvec_file,
                                                       "LakeTile")["tile_ref"]
            self.tile_number_list.append(int(tile_ref[:-1]))
Exemplo n.º 12
0
def relabel_lake_using_segmentation_heigth(in_x, in_y, in_height, in_std_height_max):
    """
    Determine the number of lakes inside a subset of PixC in radar geometry.

    In most cases, only one lake is located in the given subset, but sometimes
    different lakes can be gathered into one because of radar geometric
    distortions or in the case of a dam.

    Steps :

     1. Creates a 2D height matrix from X and Y 1D vectors
     2. Unsupervised height classification to determine the number of classes
        needed to get an STD over each class lower than in_std_height_max
     3. Smooth result using a 2D median filter
     4. Return labels

    :param in_x: X indices of "1" pixels
    :type in_x: 1D vector of int
    :param in_y: Y indices of "1" pixels
    :type in_y: 1D vector of int
    :param in_height: height of pixels
    :type in_height: 1D vector of float
    :param in_std_height_max: maximal standard deviation of height inside a lake
    :type in_std_height_max: float

    :return: labels recomputed over 1 or several lakes (labels start at 1)
    :rtype: 1D vector of int

    :raises ValueError: if in_x, in_y and in_height do not have the same size
    """
    logger = logging.getLogger("my_tools")
    logger.debug("- start -")

    # 0 - Deal with exceptions
    # 0.1 - Input vectors must all have the same size
    if (in_x.size != in_y.size) or (in_x.size != in_height.size):
        raise ValueError("relabel_lake_using_segmentation_heigth(in_x, in_y, in_height) : in_x and in_y must be the same size ;" + \
                         "currently : in_x = %d and in_y = %d" % (in_x.size, in_y.size))
    nb_pts = in_x.size

    # 1 - Compute height matrix
    # 1.1 - Init height image (one extra row/col since indices start at 0)
    logger.debug("Building heigth matrix")
    height_img = np.zeros((np.max(in_y) + 1, np.max(in_x) + 1))
    # BUGFIX: log the actual image extent (was np.max(in_y.size) + 1, i.e. the
    # vector length + 1, instead of the maximal coordinate + 1)
    message = "> Height matrix size = (X=%d , Y=%d)" % (np.max(in_y) + 1, np.max(in_x) + 1)
    logger.debug(message)

    # 1.2 - Put height for every pixel defined by the input vectors
    for ind in range(nb_pts):
        height_img[in_y[ind], in_x[ind]] = in_height[ind]

    logger.debug("K-means processing")

    # 2 - Unsupervised clustering to determine number of classes
    std_heigth = np.std(in_height)
    nb_classes = 1

    # 2.1 - Case with only one class
    if std_heigth < in_std_height_max:  # If only one lake is in the given pixc subset
        retour = np.ones(nb_pts)  # Return one unique label for all pixc
    else:
        # 2.2 - Iteratively increase the number of classes until the height std
        # inside every class falls below the threshold.
        # BUGFIX: loop condition is >= so the classifier is always fitted at
        # least once in this branch (the original "while std > max" skipped the
        # loop when std == max exactly, leaving an unfitted KMeans and crashing
        # below on kmeans_classif.labels_)
        while std_heigth >= in_std_height_max:

            nb_classes += 1

            # 2.3 - Cluster height over nb_classes classes
            kmeans_classif = KMeans(n_clusters=nb_classes)
            kmeans_classif.fit(in_height.reshape(-1, 1))  # Turn line into column for in_height

            # 2.4 - Compute height std inside each class; keep the worst one
            std_heigth = np.max([np.std(in_height[np.where(kmeans_classif.labels_ == cur_label)]) for cur_label in np.unique(kmeans_classif.labels_)])

            # If number of classes is greater than 10 => stop iteration
            if nb_classes > 10:
                break

        message = "NB classes : %d, max std : %f " % (nb_classes, std_heigth)
        logger.debug(message)

        # 3 - Format output vector

        # 3.1 - Build a labeled matrix
        labeled_img = np.zeros(height_img.shape).astype('int')
        for ind in range(nb_pts):
            labeled_img[in_y[ind], in_x[ind]] = int(kmeans_classif.labels_[ind])

        # 3.2 - Median filter to smooth output
        # NOTE(review): square(2) is a 2x2 structuring element although the
        # original comment said 3x3 — kept as-is to preserve behavior; confirm
        # the intended window size
        labeled_img_filted = median_filter(labeled_img.astype('uint8'), square(2).astype('uint8'))

        # 3.3 - Init and fill output label array (+1 so labels start at 1)
        out_labels = np.zeros(in_height.shape)
        for ind in range(nb_pts):
            out_labels[ind] = labeled_img_filted[in_y[ind], in_x[ind]] + 1

        retour = out_labels

    return retour
# Exemplo n.º 13 (0)
def get_concave_hull_from_radar_vectorisation(in_range, in_azimuth, in_v_long, in_v_lat):
    """
    Compute the concave hull of a set of points using radar vectorisation

    :param in_range: range of points
    :type in_range: 1D-array of int
    :param in_azimuth: azimuth of points
    :type in_azimuth: 1D-array of int
    :param in_v_long: longitude of points
    :type in_v_long: 1D-array of float
    :param in_v_lat: latitude of points
    :type in_v_lat: 1D-array of float

    :return: the hull of the input set of points
    :rtype: OGRMultiPolygon
    """
    # Get instance of service config file
    cfg = service_config_file.get_instance()
    # Get param nb_pix_max_contour
    nb_pix_max_contour = cfg.getint("CONFIG_PARAMS", "NB_PIX_MAX_CONTOUR")

    logger = logging.getLogger("my_hull")

    # 1 - Get relative range and azimuth (1st pixel = 1)
    lake_x = in_range - np.min(in_range) + 1
    lake_y = in_azimuth - np.min(in_azimuth) + 1

    # 2 - Get image (1 pixel margin around lake)
    lake_img = my_tools.compute_bin_mat(np.max(lake_x) + 2, np.max(lake_y) + 2, lake_x, lake_y)

    # 3 - Compute boundaries (there might be more than one if there are islands in lake)
    lake_contours = find_contours(lake_img, 0.99999999999)

    # 4 - Round contour range and azimuth coordinates to units, since they are indices in input parameters
    logger.debug("Removing duplicate points from lake contour")
    lake_contour_int = []
    for contour in lake_contours:
        # Order-preserving deduplication: the set gives O(1) membership tests
        # instead of the O(n) "not in list" of each step of the original
        # comprehension (which was also used for its side effects only)
        seen = set()
        contour_points = []
        for coords in contour:
            point = tuple(np.round(coords, 0))
            if point not in seen:
                seen.add(point)
                contour_points.append(point)
        lake_contour_int.append(contour_points)

    # 5 - Convert (azimuth, range) contour into polygon
    logger.debug("Inital polygon contains 1 external ring and %d holes rings " % (len(lake_contour_int) - 1))

    # multi_ring_list contains every ring composing the polygon: the first ring
    # contains exterior coordinates, all other rings are holes in the polygon
    multi_ring_list = []

    for contour in lake_contour_int:  # Loop over contours
        logger.debug("Building new ring with %d points " % (len(contour)))
        # ============================ HULL COMPUTATION FOR BIG LAKES ===========================
        # ============================  TO CHANGE AS SOON AS POSSIBLE ===========================
        # If the lake contour contains more than nb_pix_max_contour pixels, the
        # contour is subsampled (regularly spaced points) down to that number
        if len(contour) > nb_pix_max_contour:
            logger.warning("Number of contour points is reduced to %d points" % (nb_pix_max_contour))
            id_selected_points = np.round(np.linspace(0, len(contour) - 1, nb_pix_max_contour), 0).astype('int')
            contour = [contour[idx] for idx in id_selected_points]
        # =======================================================================================

        list_of_points = []

        for (y, x) in contour:  # Loop over azimuth and range indices

            # Retrieve lon/lat coordinates from range and azimuth coordinates
            # if current range and azimuth are found in the input lists
            # (np.where hoisted so the lookup is computed only once per point)
            matching_idx = np.where(np.logical_and(lake_x == x, lake_y == y))[0]

            # BUGFIX: the original test was matching_idx.any(), which checks the
            # truthiness of the index VALUES, so a point whose only match was at
            # pixel index 0 was wrongly discarded; .size > 0 tests existence
            if matching_idx.size > 0:
                point_idx = matching_idx[0]
                lon = in_v_long[point_idx]
                lat = in_v_lat[point_idx]

                new_point = (lon, lat)

                # Add new point :
                #     - if new_point not in list
                #     - if list contains more than 3 points : check if new point creates a crossing between segments
                if new_point not in list_of_points:
                    if len(list_of_points) < 3:
                        list_of_points.append(new_point)
                    else:
                        list_of_points = add_new_point_to_list_of_point(new_point, list_of_points)
                # commented code: list_of_points = addNewPoint(new_point, list_of_points)
            else:
                logger.debug("Point of coordinates %d, %d not found -> Point removed" % (y, x))

        if not list_of_points:
            logger.debug("Ring contains 0 points => Discarded")
            continue

        # Add first point to the end of list to close the ring
        ring = build_ring_from_list_of_points(list_of_points)

        # Check that ring does not intersect the rings already collected
        ring = build_ring_without_integrity_issues_multi_ring(ring, multi_ring_list)

        if len(ring) > 3:
            logger.debug("Adding new ring containing %d points to multi ring" % (len(ring[:-1])))

            multi_ring_list.append(ring)

        else:
            logger.debug("Ring contains less than 2 points => Discarded")

    logger.debug("Building polygon from list of rings")
    lake_poly = get_ogr_polygon_from_ring_list(multi_ring_list)

    # Buffer(0) is the standard OGR trick to downgrade a self-intersecting
    # polygon into a valid geometry
    if not lake_poly.IsValid():
        logger.debug("Polygon is invalid -> Polygon is downgraded into a valid geometry")
        lake_poly = lake_poly.Buffer(0)
    else:
        logger.debug("Polygon is valid")

    return lake_poly
# Exemplo n.º 14 (0)
    def __init__(self, in_product_type, in_pixcvec_file=None):
        """
        Constructor: init variables and set them with data retrieved from pixel cloud complementary file if asked
        
        :param in_product_type: type of product among "SP"=LakeSP and "TILE"=LakeTile
        :type in_product_type: string
        :param in_pixcvec_file: full path of pixel cloud complementary file 
                                    (L2_HR_PIXCVecRiver file if from PGE_RiverTile 
                                    or LakeTile_piexcvec if from PGE_LakeTile)
        :type in_pixcvec_file: string

        Variables of the object:
            - From L2_HR_PIXCVecRiver:
                - river_idx / 1D-array of int: indices of river pixels within PIXC arrays (= variable named pixc_index in L2_HR_PIXCVecRiver only)
            - From both L2_HR_PIXCVecRiver and LakeTile_pixcvec:
                - range_idx / 1D-array of int: range indices of water pixels (= variable named range_index in L2_HR_PIXCVecRiver and LakeTile_pixcvec)
                - azimuth_idx / 1D-array of int: azimuth indices of water pixels (= variable named azimuth_index in L2_HR_PIXCVecRiver and LakeTile_pixcvec)
                - longitude_vectorproc / 1D-array of float: improved longitude of water pixels (= variable named longitude_vectorproc in L2_HR_PIXCVecRiver and LakeTile_pixcvec)
                - latitude_vectorproc / 1D-array of float: improved latitude of water pixels (= variable named latitude_vectorproc in L2_HR_PIXCVecRiver and LakeTile_pixcvec)
                - height_vectorproc / 1D-array of float: improved height of water pixels (= variable named height_vectorproc in L2_HR_PIXCVecRiver and LakeTile_pixcvec)
                - node_id / 1D-array of float: identifier associated to river node database (= variable named node_index in L2_HR_PIXCVecRiver and node_id in LakeTile_pixcvec)
                - pixcvec_metadata / dict: dictionary of PIXCVec file metadata
            - From L2_HR_PIXC:
                - continent / string: continent covered by the tile (if global var CONTINENT_FILE exists)
            - From processing:
                - nb_water_pix / int: number of water pixels
                - reject_index / 1D-array of int: indices of pixels that are river only, ie not reservoirs or dams
                - nb_river_pix / int: number of river pixels
                - lakedb_id / 1D-array of str: identifier from the lake database (= variable named lakedb_id in LakeTile_pixcvec)
                - lakeobs_id / 1D-array of str: identifier associated to unknown object (= variable named lakeobs_id in LakeTile_pixcvec)
                - ice_clim_flag / 1D-array of int: climatological ice flag
                - ice_dyn_flag / 1D-array of int: dynamical ice flag
        """
        # Get instance of service config file
        self.cfg = service_config_file.get_instance()
        logger = logging.getLogger(self.__class__.__name__)
        logger.info("- start -")

        # Init type
        if in_product_type in ("TILE", "SP"):
            self.product_type = in_product_type
        else:
            logger.debug("Product type %s unknown, set to TILE" %
                         in_product_type)
            self.product_type = "TILE"

        # Init dimension
        self.nb_water_pix = 0

        # Init PIXCVec variables
        self.azimuth_index = None
        self.range_index = None
        self.longitude_vectorproc = None
        self.latitude_vectorproc = None
        self.height_vectorproc = None
        self.node_id = None
        self.lakedb_id = None
        self.lakeobs_id = None
        self.ice_clim_flag = None
        self.ice_dyn_flag = None
        self.pixcvec_metadata = {}
        # Fill variables if filename available
        if in_pixcvec_file is not None:
            self.set_from_pixcvec_file(in_pixcvec_file)

        # Init dictionary of PIXCVec metadata
        self.pixcvec_metadata["cycle_number"] = -9999
        self.pixcvec_metadata["pass_number"] = -9999
        self.pixcvec_metadata["tile_number"] = -9999
        self.pixcvec_metadata["swath_side"] = ""
        self.pixcvec_metadata["tile_name"] = ""
        self.pixcvec_metadata["start_time"] = ""
        self.pixcvec_metadata["stop_time"] = ""
        self.pixcvec_metadata["inner_first_latitude"] = -9999.0
        self.pixcvec_metadata["inner_first_longitude"] = -9999.0
        self.pixcvec_metadata["inner_last_latitude"] = -9999.0
        self.pixcvec_metadata["inner_last_longitude"] = -9999.0
        self.pixcvec_metadata["outer_first_latitude"] = -9999.0
        self.pixcvec_metadata["outer_first_longitude"] = -9999.0
        self.pixcvec_metadata["outer_last_latitude"] = -9999.0
        self.pixcvec_metadata["outer_last_longitude"] = -9999.0
        self.pixcvec_metadata["continent"] = ""
        self.pixcvec_metadata["ellipsoid_semi_major_axis"] = ""
        self.pixcvec_metadata["ellipsoid_flattening"] = ""
        self.pixcvec_metadata["xref_static_river_db_file"] = ""

        # Variables specific to processing
        self.continent = None
        # Specific to LakeTile processing
        if self.product_type == "TILE":
            self.nb_river_pix = 0  # Number of river pixels (used for TILE processing)
            self.river_index = None  # Indices of pixels processed by RiverTile (used in TILE processing)
            self.reject_index = None  # Indices of river pixels (not reservoirs)