def overwriteConfig_from_xml(IN_xml_tree):
    """
    Set global variables from an XML tree

    :param IN_xml_tree: XML tree (typically from .shp.xml file)
    :type IN_xml_tree: etree.parse
    """
    import cnes.common.lib.my_tools as my_tools

    # Lake database
    global LAKE_DB
    lake_db_file = IN_xml_tree.xpath("//LakeTile_shp/config_params/lake_db")[0].text
    my_tools.testFile(lake_db_file)  # Test existence of file
    LAKE_DB = lake_db_file
    print("> LAKE_DB = %s" % LAKE_DB)

    # Lake identifier attribute name in the database
    global LAKE_DB_ID
    LAKE_DB_ID = IN_xml_tree.xpath("//LakeTile_shp/config_params/lake_db_id")[0].text
    print("> LAKE_DB_ID = %s" % LAKE_DB_ID)

    # Shapefile with polygons of continents
    global CONTINENT_FILE
    try:
        continent_file = IN_xml_tree.xpath("//LakeTile_shp/config_params/continent_file")[0].text
        my_tools.testFile(continent_file)  # Test existence of file
        CONTINENT_FILE = continent_file
        print("> CONTINENT_FILE = %s" % CONTINENT_FILE)
    except Exception:  # Tag absent or file missing
        CONTINENT_FILE = None
        print("> No value for CONTINENT_FILE => LAKE_SP product not linked to a continent")

    # Water flags
    global FLAG_WATER
    FLAG_WATER = IN_xml_tree.xpath("//LakeTile_shp/config_params/flag_water")[0].text
    print("> FLAG_WATER = %s" % FLAG_WATER)

    # Dark water flags
    global FLAG_DARK
    FLAG_DARK = IN_xml_tree.xpath("//LakeTile_shp/config_params/flag_dark")[0].text
    print("> FLAG_DARK = %s" % FLAG_DARK)

    # Min size for lake product computation
    global MIN_SIZE
    MIN_SIZE = float(IN_xml_tree.xpath("//LakeTile_shp/config_params/min_size")[0].text)
    print("> MIN_SIZE = %s" % MIN_SIZE)

    # Improve geolocation or not
    # NB: bool() of a non-empty string is always True, so the flag is parsed
    # by comparing the text to "True"
    global IMP_GEOLOC
    IMP_GEOLOC = IN_xml_tree.xpath("//LakeTile_shp/config_params/imp_geoloc")[0].text == "True"
    print("> IMP_GEOLOC = %s" % IMP_GEOLOC)

    # Hull method
    global HULL_METHOD
    HULL_METHOD = float(IN_xml_tree.xpath("//LakeTile_shp/config_params/hull_method")[0].text)
    print("> HULL_METHOD = %s" % HULL_METHOD)

    # Maximal standard deviation of height inside a lake
    global STD_HEIGHT_MAX
    STD_HEIGHT_MAX = float(IN_xml_tree.xpath("//LakeTile_shp/config_params/std_height_max")[0].text)
    print("> STD_HEIGHT_MAX = %s" % STD_HEIGHT_MAX)

    # Model to deal with big lake processing
    global BIGLAKE_MODEL
    BIGLAKE_MODEL = IN_xml_tree.xpath("//LakeTile_shp/config_params/biglake_model")[0].text
    print("> BIGLAKE_MODEL = %s" % BIGLAKE_MODEL)

    # Min size for a lake to be considered as big
    global BIGLAKE_MIN_SIZE
    BIGLAKE_MIN_SIZE = int(IN_xml_tree.xpath("//LakeTile_shp/config_params/biglake_min_size")[0].text)
    print("> BIGLAKE_MIN_SIZE = %s" % BIGLAKE_MIN_SIZE)

    # Grid spacing for lake height smoothing
    global BIGLAKE_GRID_SPACING
    BIGLAKE_GRID_SPACING = int(IN_xml_tree.xpath("//LakeTile_shp/config_params/biglake_grid_spacing")[0].text)
    print("> BIGLAKE_GRID_SPACING = %s" % BIGLAKE_GRID_SPACING)

    # Grid resolution for lake height smoothing
    global BIGLAKE_GRID_RES
    BIGLAKE_GRID_RES = int(IN_xml_tree.xpath("//LakeTile_shp/config_params/biglake_grid_res")[0].text)
    print("> BIGLAKE_GRID_RES = %s" % BIGLAKE_GRID_RES)
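# --- Hypothetical usage sketch (not part of the original module) ---
# The XML file name below is a placeholder; the only assumption carried over
# from the function above is that the tree contains a
# //LakeTile_shp/config_params node and that an lxml tree is used (its
# .xpath() method is required).
#
#     from lxml import etree
#     xml_tree = etree.parse("SWOT_L2_HR_LakeTile_xxx.shp.xml")  # placeholder path
#     overwriteConfig_from_xml(xml_tree)
#     # The overwritten values are then available as module-level globals
#     # (LAKE_DB, FLAG_WATER, MIN_SIZE, BIGLAKE_MODEL, ...)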
# 1.2 - Read values according to indir_or_param_file value
if os.path.isdir(args.indir_or_param_file):
    # Read inline parameters
    location = "inline command"
    laketile_dir = args.indir_or_param_file
    output_dir = args.output_dir
    cycle_num = args.cycle_num
    pass_num = args.pass_num
    shp_option = args.shp
else:
    file_base, file_extent = os.path.splitext(args.indir_or_param_file)
    if file_extent == ".cfg":
        # Read parameter file
        location = "parameter file"
        my_tools.testFile(args.indir_or_param_file, IN_extent=".cfg")  # Test existence and extension
        my_params = readParamFile(args.indir_or_param_file)  # Read parameters
        laketile_dir = my_params["laketile_dir"]
        output_dir = my_params["output_dir"]
        cycle_num = my_params["cycle_num"]
        pass_num = my_params["pass_num"]
        shp_option = my_params["flag_prod_shp"]
    else:
        print("[ERROR]")
        print("Run by pge_lake_sp.py param_file.cfg [-l] [-v VERBOSE]")
        print("OR pge_lake_sp.py lake_tile_dir output_dir cycle_num pass_num [-shp] [-l] [-v VERBOSE]")
        sys.exit("indir_or_param_file is %s, not .cfg" % file_extent)

# 1.3 - Test input params have been filled
# 1.3.1 - LakeTile directory
def overwriteConfig_from_cfg(IN_config):
    """
    Set global variables if overwritten in a parameter file

    :param IN_config: configuration parameters
    :type IN_config: ConfigParser.reader
    """
    if "CONFIG_OVERWRITE" in IN_config.sections():

        import cnes.common.lib.my_tools as my_tools

        list_over = IN_config.options("CONFIG_OVERWRITE")

        # Lake database
        if "lake_db" in list_over:
            global LAKE_DB
            lake_db_file = IN_config.get("CONFIG_OVERWRITE", "LAKE_DB")
            my_tools.testFile(lake_db_file)  # Test existence of file
            LAKE_DB = lake_db_file
            print("> LAKE_DB = %s" % LAKE_DB)
        else:
            print("> Default value for LAKE_DB = %s" % LAKE_DB)

        # Lake identifier attribute name in the database
        if "lake_db_id" in list_over:
            global LAKE_DB_ID
            LAKE_DB_ID = IN_config.get("CONFIG_OVERWRITE", "LAKE_DB_ID")
            print("> LAKE_DB_ID = %s" % LAKE_DB_ID)
        else:
            print("> Default value for LAKE_DB_ID = %s" % LAKE_DB_ID)

        # Shapefile with polygons of continents
        if "continent_file" in list_over:
            global CONTINENT_FILE
            continent_file = IN_config.get("CONFIG_OVERWRITE", "CONTINENT_FILE")
            my_tools.testFile(continent_file)  # Test existence of file
            CONTINENT_FILE = continent_file
            print("> CONTINENT_FILE = %s" % CONTINENT_FILE)
        else:
            try:
                print("> Default value for CONTINENT_FILE = %s" % CONTINENT_FILE)
            except NameError:  # No default value for CONTINENT_FILE
                CONTINENT_FILE = None
                print("> No value for CONTINENT_FILE => LAKE_TILE product not linked to a continent")

        # Water flags
        if "flag_water" in list_over:
            global FLAG_WATER
            FLAG_WATER = IN_config.get("CONFIG_OVERWRITE", "FLAG_WATER")
            print("> FLAG_WATER = %s" % FLAG_WATER)
        else:
            print("> Default value for FLAG_WATER = %s" % FLAG_WATER)

        # Dark water flags
        if "flag_dark" in list_over:
            global FLAG_DARK
            FLAG_DARK = IN_config.get("CONFIG_OVERWRITE", "FLAG_DARK")
            print("> FLAG_DARK = %s" % FLAG_DARK)
        else:
            print("> Default value for FLAG_DARK = %s" % FLAG_DARK)

        # Hull method
        if "hull_method" in list_over:
            global HULL_METHOD
            HULL_METHOD = IN_config.getfloat("CONFIG_OVERWRITE", "HULL_METHOD")
            print("> HULL_METHOD = %s" % HULL_METHOD)
        else:
            print("> Default value for HULL_METHOD = %s" % HULL_METHOD)

        # Maximal standard deviation of height inside a lake
        if "std_height_max" in list_over:
            global STD_HEIGHT_MAX
            STD_HEIGHT_MAX = IN_config.getfloat("CONFIG_OVERWRITE", "STD_HEIGHT_MAX")
            print("> STD_HEIGHT_MAX = %s" % STD_HEIGHT_MAX)
        else:
            print("> Default value for STD_HEIGHT_MAX = %s" % STD_HEIGHT_MAX)

        # Model to deal with big lake processing
        if "biglake_model" in list_over:
            global BIGLAKE_MODEL
            BIGLAKE_MODEL = IN_config.get("CONFIG_OVERWRITE", "BIGLAKE_MODEL")
            print("> BIGLAKE_MODEL = %s" % BIGLAKE_MODEL)
        else:
            print("> Default value for BIGLAKE_MODEL = %s" % BIGLAKE_MODEL)

        # Min size for a lake to be considered as big
        if "biglake_min_size" in list_over:
            global BIGLAKE_MIN_SIZE
            BIGLAKE_MIN_SIZE = IN_config.getint("CONFIG_OVERWRITE", "BIGLAKE_MIN_SIZE")
            print("> BIGLAKE_MIN_SIZE = %s" % BIGLAKE_MIN_SIZE)
        else:
            print("> Default value for BIGLAKE_MIN_SIZE = %s" % BIGLAKE_MIN_SIZE)

        # Grid spacing for lake height smoothing
        if "biglake_grid_spacing" in list_over:
            global BIGLAKE_GRID_SPACING
            BIGLAKE_GRID_SPACING = IN_config.getint("CONFIG_OVERWRITE", "BIGLAKE_GRID_SPACING")
            print("> BIGLAKE_GRID_SPACING = %s" % BIGLAKE_GRID_SPACING)
        else:
            print("> Default value for BIGLAKE_GRID_SPACING = %s" % BIGLAKE_GRID_SPACING)

        # Grid resolution for lake height smoothing
        if "biglake_grid_res" in list_over:
            global BIGLAKE_GRID_RES
            BIGLAKE_GRID_RES = IN_config.getint("CONFIG_OVERWRITE", "BIGLAKE_GRID_RES")
            print("> BIGLAKE_GRID_RES = %s" % BIGLAKE_GRID_RES)
        else:
            print("> Default value for BIGLAKE_GRID_RES = %s" % BIGLAKE_GRID_RES)
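# --- Hypothetical usage sketch (not part of the original module) ---
# Only the CONFIG_OVERWRITE section name and the option names come from the
# function above; the parameter-file path and the example values are
# placeholders.
#
#     import configparser
#     my_config = configparser.ConfigParser()
#     my_config.read("lake_tile_param.cfg")  # placeholder path
#     # [CONFIG_OVERWRITE]
#     # LAKE_DB = /path/to/lake_a_priori_db.shp
#     # HULL_METHOD = 1.0
#     overwriteConfig_from_cfg(my_config)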
parser = argparse.ArgumentParser(
    description="Compute SWOT LakeTile products from multiple tiles of PIXC products and their associated PIXCVecRiver products.")
parser.add_argument("command_file", help="command file (*.cfg)")
args = parser.parse_args()

print("===== multiLakeTileProcessing = BEGIN =====")
print("")
timer = my_timer.Timer()
timer.start()

# 1 - Read command file
print("WORKING VARIABLES")
print()
my_tools.testFile(args.command_file, IN_extent=".cfg")  # Test existence and extension
my_params = read_command_file(args.command_file)  # Read variables in command file

# 2 - Initialization
multi_lake_tile = MultiLakeTile(my_params)
print(timer.info(0))

# 3 - Run pre-processing
multi_lake_tile.run_preprocessing()
print(timer.info(0))

# 4 - Run processing
multi_lake_tile.run_processing()
print(timer.info(0))
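# --- Hypothetical invocation sketch (not part of the original script) ---
# The script and command-file names are placeholders (the script name is
# guessed from the multiLakeTileProcessing banner); the expected sections and
# keys of the command file are defined by read_command_file, which is not
# shown in this excerpt.
#
#     python multi_lake_tile.py multi_lake_tile_command.cfg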
def run_preprocessing(self):
    """
    Process PGE_LakeTile IN, i.e. test input paths, retrieve orbit info,
    open the lake database and init objects
    """
    logger = logging.getLogger(self.__class__.__name__)
    logger.info("")
    logger.info("")
    logger.info("**************************")
    logger.info("***** PRE-PROCESSING *****")
    logger.info("**************************")
    logger.info("")

    # 1 - Test existence and file format of input paths
    logger.info("> 1 - Testing existence of input paths...")
    # 1.1 - PIXC file
    message = "  INPUT PIXC file = %s" % self.pixc_file
    logger.info(message)
    my_tools.testFile(self.pixc_file, IN_extent=".nc")
    # 1.2 - PIXCVecRiver file
    message = "  INPUT PIXCVecRiver file = %s" % self.pixc_vec_river_file
    logger.info(message)
    my_tools.testFile(self.pixc_vec_river_file, IN_extent=".nc")
    # 1.3 - Output directory
    message = "  OUTPUT DIR = %s" % self.output_dir
    logger.info(message)
    my_tools.testDir(self.output_dir)
    logger.info("")

    # 2 - Retrieve orbit info from PIXC filename and compute output filenames
    logger.info("> 2 - Retrieving tile info from PIXC filename...")
    self.lake_tile_filenames = my_names.lakeTileFilenames(
        self.pixc_file, self.pixc_vec_river_file, self.output_dir)
    logger.info("")

    # 3 - Object initialisation
    logger.info("> 3 - Init and format input objects...")
    logger.info("")

    # 3.1 - Init PIXCVec product by retrieving data from the pixel cloud complementary file after river processing
    logger.info("> 3a - Init pixel cloud complementary file...")
    self.objPixcVec = proc_pixc_vec.PixelCloudVec("TILE")
    self.objPixcVec.set_from_pixcvec_file(self.pixc_vec_river_file)
    logger.info("")

    # 3.2 - Retrieve needed data from the pixel cloud
    logger.info("> 3b - Retrieving needed data from the pixel cloud...")
    self.objPixc = proc_pixc.PixelCloud()
    self.objPixc.set_from_pixc_file(self.pixc_file, self.objPixcVec.reject_index)
    logger.info("")

    # 3.3 - Reshape PIXCVec arrays
    logger.info("> 3c - Reshape PIXCVecRiver arrays...")
    self.objPixcVec.reshape(self.objPixc)
    logger.info("")

    # 4 - Retrieve lake DB layer
    logger.info("> 4 - Retrieving lake database layer...")
    if my_var.LAKE_DB == "":
        logger.info("NO database specified -> NO link of SWOT obs with a priori lake")
    else:
        if os.path.exists(my_var.LAKE_DB):
            type_db = my_var.LAKE_DB.split('.')[-1]  # Type of database
            if type_db == "shp":  # Shapefile format
                self.objLakeDb = lake_db.LakeDb_shp(my_var.LAKE_DB, self.objPixc.tile_poly)
            elif type_db == "sqlite":  # SQLite format
                self.objLakeDb = lake_db.LakeDb_sqlite(my_var.LAKE_DB, self.objPixc.tile_poly)
            else:
                message = "Lake a priori database format (%s) is unknown: must be .shp or .sqlite" % type_db
                raise service_error.ProcessingError(message, logger)
        else:
            message = "  ERROR = %s doesn't exist" % my_var.LAKE_DB
            raise service_error.ProcessingError(message, logger)
    logger.info("")

    # 5 - Initialize lake product
    logger.info("> 5 - Init lake product object...")
    self.objLake = proc_lake.LakeProduct(
        "TILE",
        self.objPixc,
        self.objPixcVec,
        self.objLakeDb,
        os.path.basename(self.lake_tile_filenames.lake_tile_shp_file).split(".")[0],
        in_id_prefix=self.lake_tile_filenames.lake_id_prefix)
    logger.info("")
def __init__(self, cmd_file):
    """
    Constructor of PGELakeTile

    :param cmd_file: command file full path
    :type cmd_file: string
    """

    # 0 - Init timer
    self.timer = my_timer.Timer()
    self.timer.start()

    # 1 - Load command file
    self.cmd_file = cmd_file
    my_tools.testFile(cmd_file, IN_extent=".cfg")  # Test existence and extension
    my_params = self._read_cmd_file()  # Read parameters
    self.pixc_file = my_params["pixc_file"]
    self.pixc_vec_river_file = my_params["pixc_vec_river_file"]
    self.output_dir = my_params["output_dir"]

    # 2 - Load parameter file
    # 2.1 - Read value from command file
    file_config = my_params["param_file"]
    # 2.2 - Test existence
    if not os.path.exists(file_config):
        raise service_error.DirFileError(file_config)
    # 2.3 - Load parameters
    self.cfg = service_config_file.ServiceConfigFile(file_config)

    # 3 - Put command parameters inside cfg
    self._put_cmd_value(my_params)

    # 4 - Initiate logging service
    service_logger.ServiceLogger()
    logger = logging.getLogger(self.__class__.__name__)

    # 5 - Print info
    logger.info("======================================")
    logger.info("===== lakeTileProcessing = BEGIN =====")
    logger.info("======================================")
    message = "> Command file: " + str(self.cmd_file)
    logger.info(message)
    message = "> " + str(self.cfg)
    logger.info(message)
    logger.info("")

    # 6 - Test input parameters
    logger.info(">> Test input parameters")
    self._check_config_parameters()
    logger.info("")

    # 7 - Update global variables
    # TODO: replace all global variables by calls to service_config_file
    my_var.tmpGetConfigFromServiceConfigFile()

    # 8 - Form processing metadata dictionary
    self.proc_metadata = {}
    self.proc_metadata["xref_static_lake_db_file"] = my_var.LAKE_DB
    self.proc_metadata["xref_input_l2_hr_pixc_file"] = self.pixc_file
    self.proc_metadata["xref_input_l2_hr_pixc_vec_river_file"] = self.pixc_vec_river_file
    self.proc_metadata["xref_l2_hr_lake_tile_param_file"] = file_config

    logger.info("")
    logger.info("")
def _check_config_parameters(self):
    """
    Check parameter coherence for the LakeTile parameter file

    :return: True if OK
    """
    logger = logging.getLogger(self.__class__.__name__)

    try:

        # 1 - Config parameters from command file

        # 1.1 - PATHS section
        # PIXC file
        self.cfg.test_var_config_file('PATHS', 'PIXC file', str)
        my_tools.testFile(self.cfg.get('PATHS', 'PIXC file'))
        logger.debug('PIXC file = ' + str(self.cfg.get('PATHS', 'PIXC file')))
        # PIXCVecRiver file
        self.cfg.test_var_config_file('PATHS', 'PIXCVecRiver file', str)
        my_tools.testFile(self.cfg.get('PATHS', 'PIXCVecRiver file'))
        logger.debug('PIXCVecRiver file = ' + str(self.cfg.get('PATHS', 'PIXCVecRiver file')))
        # Output directory
        self.cfg.test_var_config_file('PATHS', 'Output directory', str)
        my_tools.testDir(self.cfg.get('PATHS', 'Output directory'))
        logger.debug('Output directory = ' + str(self.cfg.get('PATHS', 'Output directory')))

        # 1.2 - DATABASES section
        # Lake database full path
        self.cfg.test_var_config_file('DATABASES', 'LAKE_DB', str)
        my_tools.testFile(self.cfg.get('DATABASES', 'LAKE_DB'))
        logger.debug('LAKE_DB = ' + str(self.cfg.get('DATABASES', 'LAKE_DB')))
        # Lake identifier attribute name in the database
        self.cfg.test_var_config_file('DATABASES', 'LAKE_DB_ID', str)
        logger.debug('LAKE_DB_ID = ' + str(self.cfg.get('DATABASES', 'LAKE_DB_ID')))
        # Continent file, needed if the LakeSP product is split per continent
        if self.cfg.get('DATABASES', 'CONTINENT_FILE') is None:
            logger.debug('CONTINENT_FILE not filled => LakeTile product not linked to a continent')
        else:
            self.cfg.test_var_config_file('DATABASES', 'CONTINENT_FILE', str)
            my_tools.testFile(self.cfg.get('DATABASES', 'CONTINENT_FILE'))
            logger.debug('CONTINENT_FILE = ' + str(self.cfg.get('DATABASES', 'CONTINENT_FILE')))

        # 1.3 - OPTIONS section
        # Shapefile production
        self.cfg.test_var_config_file('OPTIONS', 'Produce shp', bool)
        logger.debug('Produce shp = ' + str(self.cfg.get('OPTIONS', 'Produce shp')))

        # 2 - Config parameters from parameter file

        # 2.1 - CONFIG_PARAMS section
        # Water flags: 3=water near land edge, 4=interior water
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'FLAG_WATER', str)
        logger.debug('FLAG_WATER = ' + str(self.cfg.get('CONFIG_PARAMS', 'FLAG_WATER')))
        # Dark water flags: 23=dark water near land, 24=interior dark water
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'FLAG_DARK', str)
        logger.debug('FLAG_DARK = ' + str(self.cfg.get('CONFIG_PARAMS', 'FLAG_DARK')))
        # Min size for a lake to generate a lake product (=polygon + attributes) for it
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'MIN_SIZE', float)
        logger.debug('MIN_SIZE = ' + str(self.cfg.get('CONFIG_PARAMS', 'MIN_SIZE')))
        # Maximal standard deviation of height inside a lake
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'STD_HEIGHT_MAX', float)
        logger.debug('STD_HEIGHT_MAX = ' + str(self.cfg.get('CONFIG_PARAMS', 'STD_HEIGHT_MAX')))
        # Improve PixC geolocation (=True) or not (=False)
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'IMP_GEOLOC', bool)
        logger.debug('IMP_GEOLOC = ' + str(self.cfg.get('CONFIG_PARAMS', 'IMP_GEOLOC')))
        # Method to compute lake boundary or polygon hull
        # 0=convex hull, 1=concave hull (1.0=with alpha param (default), 1.1=without), 2=concave hull radar vectorisation
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'HULL_METHOD', float, valeurs=[0, 1, 1.1, 2])
        logger.debug('HULL_METHOD = ' + str(self.cfg.get('CONFIG_PARAMS', 'HULL_METHOD')))
        # Big lakes parameters for improved geolocation
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_MODEL', str, valeurs=["polynomial", "no"])
        logger.debug('BIGLAKE_MODEL = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_MODEL')))
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_MIN_SIZE', float)
        logger.debug('BIGLAKE_MIN_SIZE = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_MIN_SIZE')))
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_GRID_SPACING', float)
        logger.debug('BIGLAKE_GRID_SPACING = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_GRID_SPACING')))
        self.cfg.test_var_config_file('CONFIG_PARAMS', 'BIGLAKE_GRID_RES', float)
        logger.debug('BIGLAKE_GRID_RES = ' + str(self.cfg.get('CONFIG_PARAMS', 'BIGLAKE_GRID_RES')))

        # 2.2 - ID section
        # Nb digits for counter of lakes in a tile or pass
        self.cfg.test_var_config_file('ID', 'NB_DIGITS', str)
        logger.debug('NB_DIGITS = ' + str(self.cfg.get('ID', 'NB_DIGITS')))

        # 2.3 - FILENAMES_PATTERN section
        # Product generator
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PRODUCER', str)
        logger.debug('PRODUCER = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PRODUCER')))
        # Composite Release IDentifier for LakeTile processing
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_CRID', str)
        logger.debug('LAKE_TILE_CRID = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_CRID')))
        # Composite Release IDentifier for LakeSP processing
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_SP_CRID', str)
        logger.debug('LAKE_SP_CRID = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_SP_CRID')))
        # PIXC product
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PIXC_PREFIX', str)
        logger.debug('PIXC_PREFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PIXC_PREFIX')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PIXC_PATTERN_PRINT', str)
        logger.debug('PIXC_PATTERN_PRINT = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PIXC_PATTERN_PRINT')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PIXC_PATTERN_IND', str)
        logger.debug('PIXC_PATTERN_IND = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PIXC_PATTERN_IND')))
        # PIXCVecRiver product
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PIXCVEC_RIVER_PREFIX', str)
        logger.debug('PIXCVEC_RIVER_PREFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PIXCVEC_RIVER_PREFIX')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PIXCVEC_RIVER_PATTERN_PRINT', str)
        logger.debug('PIXCVEC_RIVER_PATTERN_PRINT = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PIXCVEC_RIVER_PATTERN_PRINT')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'PIXCVEC_RIVER_PATTERN_IND', str)
        logger.debug('PIXCVEC_RIVER_PATTERN_IND = ' + str(self.cfg.get('FILENAMES_PATTERN', 'PIXCVEC_RIVER_PATTERN_IND')))
        # LakeTile product
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_PREFIX', str)
        logger.debug('LAKE_TILE_PREFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_PREFIX')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_PATTERN', str)
        logger.debug('LAKE_TILE_PATTERN = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_PATTERN')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_PATTERN_PRINT', str)
        logger.debug('LAKE_TILE_PATTERN_PRINT = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_PATTERN_PRINT')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_PATTERN_IND', str)
        logger.debug('LAKE_TILE_PATTERN_IND = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_PATTERN_IND')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_SHP_SUFFIX', str)
        logger.debug('LAKE_TILE_SHP_SUFFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_SHP_SUFFIX')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_SHP_META_SUFFIX', str)
        logger.debug('LAKE_TILE_SHP_META_SUFFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_SHP_META_SUFFIX')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_EDGE_SUFFIX', str)
        logger.debug('LAKE_TILE_EDGE_SUFFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_EDGE_SUFFIX')))
        self.cfg.test_var_config_file('FILENAMES_PATTERN', 'LAKE_TILE_PIXCVEC_SUFFIX', str)
        logger.debug('LAKE_TILE_PIXCVEC_SUFFIX = ' + str(self.cfg.get('FILENAMES_PATTERN', 'LAKE_TILE_PIXCVEC_SUFFIX')))

    # Managed error
    except service_error.ConfigFileError:
        logger.error("Error in the configuration file %s", self.cfg.path_conf)
        raise
    # Warning: error not managed!
    except Exception:
        logger.error("Something went wrong during configuration file check!")
        raise

    return True
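# --- Hypothetical configuration sketch (not part of the original module) ---
# The section and key names below are exactly the ones checked by
# _check_config_parameters (sections 1.x come from the command file, 2.x from
# the parameter file, both merged into self.cfg); every value is a placeholder.
#
#     [PATHS]
#     PIXC file = /path/to/SWOT_L2_HR_PIXC_xxx.nc
#     PIXCVecRiver file = /path/to/SWOT_L2_HR_PIXCVecRiver_xxx.nc
#     Output directory = /path/to/output
#
#     [DATABASES]
#     LAKE_DB = /path/to/lake_a_priori_db.shp
#     LAKE_DB_ID = lake_id
#
#     [OPTIONS]
#     Produce shp = True
#
#     [CONFIG_PARAMS]
#     FLAG_WATER = 3;4
#     FLAG_DARK = 23;24
#     MIN_SIZE = 1.0
#     STD_HEIGHT_MAX = 10.0
#     IMP_GEOLOC = True
#     HULL_METHOD = 1.0
#     BIGLAKE_MODEL = polynomial
#     BIGLAKE_MIN_SIZE = 5000
#     BIGLAKE_GRID_SPACING = 4000
#     BIGLAKE_GRID_RES = 8000
#
#     [ID]
#     NB_DIGITS = 4
#
#     [FILENAMES_PATTERN]
#     PRODUCER = CNES
#     ; ... followed by the PIXC_*, PIXCVEC_RIVER_* and LAKE_TILE_* patterns
#     ; listed in the function above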