def _test_output_directory(self):
    """
    Verify that the configured output directory exists.

    Logs the directory being checked, then delegates the actual
    existence test to my_tools.test_dir (which raises on failure).
    """
    logger = logging.getLogger(self.__class__.__name__)
    logger.info("[lakeTileProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.test_dir(self.output_dir)
def _test_input_directories(self):
    """
    Verify that every mandatory input directory exists.

    Checks, in order: the LakeTile shp directory, the LakeTile edge
    directory and the LakeTile pixcvec directory. Each one is logged
    and then tested with my_tools.test_dir (which raises on failure).
    """
    logger = logging.getLogger(self.__class__.__name__)
    # Same check for each input directory: log it, then test it
    checks = (
        ("> 1.1 INPUT Laketile shp directory = %s", self.laketile_shp_dir),
        ("> 1.2 INPUT Laketile edge directory = %s", self.laketile_edge_dir),
        ("> 1.3 INPUT Laketile pixcvec directory = %s", self.laketile_pixcvec_dir),
    )
    for template, directory in checks:
        logger.info(template % directory)
        my_tools.test_dir(directory)
def _check_config_parameters(self):
    """
    Check parameters coherence for the LakeTile command/parameter file.

    Each parameter is validated with self.cfg.test_var_config_file
    (type check, allowed values, default value injection) and then
    logged at debug level.

    :return: True if all checks pass
    :rtype: bool
    :raises service_error.ConfigFileError: if a configuration check fails
    """
    logger = logging.getLogger(self.__class__.__name__)

    try:

        # 1 - Config parameters from command file

        # 1.1 - PATH section
        # PIXC file
        self.cfg.test_var_config_file('PATHS', 'PIXC file', str)
        my_tools.test_file(self.cfg.get('PATHS', 'PIXC file'))
        logger.debug('PIXC file = ' + str(self.cfg.get('PATHS', 'PIXC file')))
        # PIXCVecRiver file
        self.cfg.test_var_config_file('PATHS', 'PIXCVecRiver file', str)
        my_tools.test_file(self.cfg.get('PATHS', 'PIXCVecRiver file'))
        logger.debug('PIXCvecRiver file = ' + str(self.cfg.get('PATHS', 'PIXCVecRiver file')))
        # Output directory
        self.cfg.test_var_config_file('PATHS', 'Output directory', str)
        my_tools.test_dir(self.cfg.get('PATHS', 'Output directory'))
        logger.debug('Output directory = ' + str(self.cfg.get('PATHS', 'Output directory')))

        # 1.2 - DATABASES section
        # Lake database full path; optional — format is deduced from the extension
        if self.cfg.get('DATABASES', 'LAKE_DB') is None:
            logger.debug('LAKE_DB not filled => LakeTile product not linked to a lake database and continent')
        elif self.cfg.get('DATABASES', 'LAKE_DB').endswith(".shp"):
            self.cfg.test_var_config_file('DATABASES', 'LAKE_DB', str)
            my_tools.test_file(self.cfg.get('DATABASES', 'LAKE_DB'))
            logger.debug('LAKE_DB = ' + str(self.cfg.get('DATABASES', 'LAKE_DB')))
            # Shapefile database also needs the lake identifier attribute name
            if self.cfg.get('DATABASES', 'LAKE_DB_ID'):
                self.cfg.test_var_config_file('DATABASES', 'LAKE_DB_ID', str)
                logger.debug('LAKE_DB_ID = ' + str(self.cfg.get('DATABASES', 'LAKE_DB_ID')))
            else:
                logger.warning('LAKE_DB file given but the lake_id fieldname is missing')
        elif self.cfg.get('DATABASES', 'LAKE_DB').endswith(".sqlite"):
            # SQLite database: no lake identifier fieldname required
            self.cfg.test_var_config_file('DATABASES', 'LAKE_DB', str)
            my_tools.test_file(self.cfg.get('DATABASES', 'LAKE_DB'))
            logger.debug('LAKE_DB = ' + str(self.cfg.get('DATABASES', 'LAKE_DB')))
        else:
            # Unknown extension: logged but not fatal
            logger.debug('Unknown LAKE_DB file format for file : %s' % (self.cfg.get('DATABASES', 'LAKE_DB')))

        # 1.3 - OPTIONS section
        # Shapefile production
        self.cfg.test_var_config_file('OPTIONS', 'Produce shp', bool)
        logger.debug('Produce shp = ' + str(self.cfg.get('OPTIONS', 'Produce shp')))

        # 2 - Config parameters from parameter file

        # 2.1 - CONFIG_PARAMS section
        # Water flag = 3=water near land edge 4=interior water
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'FLAG_WATER', str, val_default="3;4", logger=logger)
        logger.debug('FLAG_WATER = ' + str(self.cfg.get('CONFIG_PARAMS', 'FLAG_WATER')))
        # Dark water flag = 23=darkwater near land 24=interior dark water
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'FLAG_DARK', str, val_default="23;24", logger=logger)
        logger.debug('FLAG_DARK = ' + str(self.cfg.get('CONFIG_PARAMS', 'FLAG_DARK')))
        # Min size for a lake to generate a lake product (=polygon + attributes) for it
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'MIN_SIZE', float, val_default=1.0, logger=logger)
        logger.debug('MIN_SIZE = ' + str(self.cfg.get('CONFIG_PARAMS', 'MIN_SIZE')))
        # Maximal standard deviation of height inside a lake (-1 = do not compute lake height segmentation)
        # FIX: logger=logger added for consistency with every other CONFIG_PARAMS check
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'STD_HEIGHT_MAX', float, val_default=-1.0, logger=logger)
        logger.debug('STD_HEIGHT_MAX = ' + str(self.cfg.get('CONFIG_PARAMS', 'STD_HEIGHT_MAX')))
        # To improve PixC golocation (=True) or not (=False)
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'IMP_GEOLOC', bool, val_default=True, logger=logger)
        logger.debug('IMP_GEOLOC = ' + str(self.cfg.get('CONFIG_PARAMS', 'IMP_GEOLOC')))
        # Method to compute lake boundary or polygon hull
        # 0 = convex hull
        # 1.0 = concave hull computed in ground geometry, based on Delaunay triangulation - using CGAL library
        # 1.1 = concave hull computed in ground geometry, based on Delaunay triangulation - with alpha parameter varying across-track
        # 2 = edge computed in radar geometry, then converted in ground geometry (default)
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'HULL_METHOD', float, valeurs=[0, 1.0, 1.1, 2], val_default=2, logger=logger)
        logger.debug('HULL_METHOD = ' + str(self.cfg.get('CONFIG_PARAMS', 'HULL_METHOD')))
        # Max number of pixels for hull computation method 1
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'NB_PIX_MAX_DELAUNEY', int, val_default=100000, logger=logger)
        logger.debug('NB_PIX_MAX_DELAUNEY = ' + str(self.cfg.get('CONFIG_PARAMS', 'NB_PIX_MAX_DELAUNEY')))
        # Max number of contour points for hull computation method 2
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'NB_PIX_MAX_CONTOUR', int, val_default=8000, logger=logger)
        logger.debug('NB_PIX_MAX_CONTOUR = ' + str(self.cfg.get('CONFIG_PARAMS', 'NB_PIX_MAX_CONTOUR')))
        # Big lakes parameters for improved geoloc
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_MODEL', str, valeurs=["polynomial", "no"], val_default="polynomial", logger=logger)
        logger.debug('BIGLAKE_MODEL = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_MODEL')))
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_MIN_SIZE', float, val_default=50000000.0, logger=logger)
        logger.debug('BIGLAKE_MIN_SIZE = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_MIN_SIZE')))
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_GRID_SPACING', float, val_default=4000, logger=logger)
        logger.debug('BIGLAKE_GRID_SPACING = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_GRID_SPACING')))
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_GRID_RES', float, val_default=8000, logger=logger)
        logger.debug('BIGLAKE_GRID_RES = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_GRID_RES')))

        # 2.2 - ID section
        # Nb digits for counter of lakes in a tile or pass
        # NOTE(review): declared type is str while the default is the int 6 — confirm upstream intent
        self.cfg.test_var_config_file('ID', 'NB_DIGITS', str, val_default=6, logger=logger)
        logger.debug('NB_DIGITS = ' + str(self.cfg.get('ID', 'NB_DIGITS')))

        # 2.3 - FILE_INFORMATION section
        # Product generator
        self.cfg.test_var_config_file('FILE_INFORMATION', 'PRODUCER', str)
        logger.debug('PRODUCER = ' + str(self.cfg.get('FILE_INFORMATION', 'PRODUCER')))
        # Composite Release IDentifier for LakeTile processing (read from the CRID section)
        self.cfg.test_var_config_file('CRID', 'LAKE_TILE_CRID', str)
        logger.debug('LAKE_TILE_CRID = ' + str(self.cfg.get('CRID', 'LAKE_TILE_CRID')))
        # Composite Release IDentifier for LakeSP processing (read from the CRID section)
        self.cfg.test_var_config_file('CRID', 'LAKE_SP_CRID', str)
        logger.debug('LAKE_SP_CRID = ' + str(self.cfg.get('CRID', 'LAKE_SP_CRID')))

    # Error managed: configuration check failed -> log and propagate
    except service_error.ConfigFileError:
        message = "Error in the configuration file " + self.cfg.path_conf
        logger.error(message, exc_info=True)
        raise
    # Warning: any other error is unexpected -> log and propagate
    except Exception:
        logger.error("Something wrong happened during configuration file check!", exc_info=True)
        raise

    return True
def run_preprocessing(self):
    """
    Retrieve the set of (cycle, pass) couples to process, from the
    LakeTile shp files found in the input directory that match the
    cycle / pass conditions given to the processor.
    """

    print("")
    print("")
    print("[multiLakeSPProcessing] PRE-PROCESSING...")
    print("")

    # 1 - Test existence of directories
    print(
        "[multiLakeSPProcessing] > 1 - Testing existence of working directories..."
    )
    # 1.1 - LakeTile shp directory
    print(
        "[multiLakeSPProcessing] INPUT DIR for LakeTile shp directory = %s"
        % self.laketile_shp_dir)
    my_tools.test_dir(self.laketile_shp_dir)
    # 1.2 - LakeTile edge directory
    print(
        "[multiLakeSPProcessing] INPUT DIR for LakeTile edge directory = %s"
        % self.laketile_edge_dir)
    my_tools.test_dir(self.laketile_edge_dir)
    # 1.3 - LakeTile pixcvec directory
    print(
        "[multiLakeSPProcessing] INPUT DIR for LakeTile pixcvec directory = %s"
        % self.laketile_pixcvec_dir)
    my_tools.test_dir(self.laketile_pixcvec_dir)
    # 1.4 - Output directory
    print("[multiLakeSPProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.test_dir(self.output_dir)
    print("")

    # 2 - Get input files
    print("[multiLakeSPProcessing] > 2 - Retrieving input files ...")

    # 2.1 - List all files in self.laketile_shp_dir
    tmp_list = os.listdir(self.laketile_shp_dir)

    # 2.2 - Compute file prefix regarding cycle / pass conditions
    lake_tile_prefix = "SWOT_L2_HR_LakeTile_"  # from my_var.LAKE_TILE_PREFIX
    lake_tile_shp_suffix = ".shp"  # from my_var.LAKE_TILE_SHP_SUFFIX

    cond_prefix = lake_tile_prefix
    # FIX: accept both the int -1 and the string "-1" as the "no filter" sentinel;
    # the original compared against "-1" only, which can never match once
    # cycle_num/pass_num are ints (as the %03d formatting below requires)
    if (self.cycle_num is None) or (self.cycle_num in (-1, "-1")):
        # Deal with all laketile files in laketile directories
        print(
            "[multiLakeSPProcessing] All LakeTile files in the input directories"
        )
    else:
        # Deal with LakeTile files with cycle = self.cycle_num
        cond_prefix += "%03d" % self.cycle_num
        if (self.pass_num is None) or (self.pass_num in (-1, "-1")):
            print(
                "[multiLakeSPProcessing] Lake tile files with cycle=%03d"
                % self.cycle_num)
        else:
            # Deal with LakeTile files with cycle = self.cycle_num and pass = self.pass_num
            print(
                "[multiLakeSPProcessing] Lake tile files with cycle=%03d and pass=%03d"
                % (self.cycle_num, self.pass_num))
            cond_prefix += "_%03d" % self.pass_num

    # 2.3 - For each LakeTile shp file matching the conditions, register its (cycle, pass) couple
    for cur_item in tmp_list:
        # Test if file meets the condition of laketile shp file
        if cur_item.startswith(cond_prefix) and cur_item.endswith(lake_tile_shp_suffix):
            information = locnes_filenames.get_info_from_filename(cur_item, "LakeTile")
            cycle_num = information["cycle"]
            pass_num = information["pass"]
            self.cycle_pass_set.add((cycle_num, pass_num))

    print("[multiLakeSPProcessing] --> %d cycle and pass to deal with"
          % len(self.cycle_pass_set))
    for (cycle_num, pass_num) in self.cycle_pass_set:
        print("[multiLakeSPProcessing] cycle %s pass %s " % (cycle_num, pass_num))
    print("")
def run_preprocessing(self):
    """
    Retrieve the list of input files, i.e. L2_HR_PIXC tile files and
    their associated L2_HR_PIXCVecRiver files, matching the cycle /
    pass / tile conditions given to the processor.
    """

    print("")
    print("")
    print("[multiLakeTileProcessing] PRE-PROCESSING...")
    print("")

    # 1 - Test existence of directories
    print("[multiLakeTileProcessing] > 1 - Testing existence of working directories...")
    # 1.1 - PIXC directory
    print("[multiLakeTileProcessing] INPUT DIR for PIXC files = %s" % self.pixc_dir)
    my_tools.test_dir(self.pixc_dir)
    # 1.2 - PIXCVecRiver directory
    print("[multiLakeTileProcessing] INPUT DIR for PIXCVecRiver files = %s" % self.pixc_vec_river_dir)
    my_tools.test_dir(self.pixc_vec_river_dir)
    # 1.3 - Output directory
    print("[multiLakeTileProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.test_dir(self.output_dir)
    print("")

    # 2 - Get input files
    print("[multiLakeTileProcessing] > 2 - Retrieving input files...")

    # 2.1 - Compute file prefix regarding cycle / pass / tile conditions
    pixc_prefix = locnes_filenames.PIXC_PREFIX
    pixcvec_river_prefix = locnes_filenames.PIXCVEC_RIVER_PREFIX

    cond_prefix = pixc_prefix
    # FIX: accept both the int -1 and the string "-1" as the "no filter" sentinel;
    # the original compared against "-1" only, which can never match once
    # cycle_num/pass_num are ints (as the %03d formatting below requires)
    if (self.cycle_num is None) or (self.cycle_num in (-1, "-1")):
        # Deal with all PixC files in self.pixc_dir
        print("[multiLakeTileProcessing] All PixC files in the input directory")
    else:
        # Deal with PixC files with cycle = self.cycle_num
        cond_prefix += "%03d" % self.cycle_num
        if (self.pass_num is None) or (self.pass_num in (-1, "-1")):
            print("[multiLakeTileProcessing] PixC files with cycle=%03d" % self.cycle_num)
        else:
            # Deal with PixC files with cycle = self.cycle_num and pass = self.pass_num
            cond_prefix += "_%03d" % self.pass_num
            if self.tile_ref is not None:
                # Deal with PixC files with cycle = self.cycle_num, pass = self.pass_num and tile id = self.tile_ref
                print("[multiLakeTileProcessing] PixC files with cycle=%03d , pass=%03d , tile=%s"
                      % (self.cycle_num, self.pass_num, self.tile_ref))
                cond_prefix += "_%s" % self.tile_ref
            else:
                print("[multiLakeTileProcessing] PixC files with cycle=%03d and pass=%03d"
                      % (self.cycle_num, self.pass_num))

    # 2.2 - List all files in self.pixc_dir
    tmp_list = os.listdir(self.pixc_dir)

    # 2.3 - For each listed file, get the related PIXCVecRiver file if it exists
    for cur_item in tmp_list:
        # Test if file meets the condition, i.e. it's a wanted PIXC file
        if cur_item.startswith(cond_prefix):
            # Associated PIXCVecRiver file name
            cur_pixc_vec_river = cur_item.replace(pixc_prefix, pixcvec_river_prefix)
            # If associated PIXCVecRiver file exists, add pair of filenames
            if os.path.exists(os.path.join(self.pixc_vec_river_dir, cur_pixc_vec_river)):
                self.list_pixc.append(cur_item)
                self.list_pixc_vec_river.append(cur_pixc_vec_river)
                self.nb_input += 1

    print("[multiLakeTileProcessing] --> %d tile(s) to deal with" % self.nb_input)
    print("")