def run_preprocessing(self):
    """
    Retrieve the list of input files, i.e. L2_HR_LakeTile products for wanted pass = shapefile + PIXC_edge + PIXCVec files

    Tests input/output directories, selects the LakeTile_shp metadata files matching
    (cycle_num, pass_num), gathers the associated _shp / _pixcvec / _edge files per
    continent and per swath, sorts them from south to north, then opens the a priori
    lake database layer.
    """
    my_api.printInfo("")
    my_api.printInfo("")
    my_api.printInfo("[lakeSPProcessing] PRE-PROCESSING...")
    my_api.printInfo("")

    # 1 - Test existence of directories
    my_api.printInfo("[lakeSPProcessing] > 1 - Testing existence of working directories ...")
    # 1.1 - LakeTile directory
    my_api.printInfo("[lakeSPProcessing] INPUT LakeTile DIR = %s" % self.lake_tile_dir)
    my_tools.testDir(self.lake_tile_dir)
    # 1.2 - Output directory
    my_api.printInfo("[lakeSPProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.testDir(self.output_dir)
    my_api.printInfo("")

    # 2 - Get list of input files
    my_api.printInfo("[lakeSPProcessing] > 2 - Retrieving input files ...")

    # 2.1 - Get ascending or descending orientation
    self.ascending = (self.pass_num % 2 == 0)  # TODO: replace when new orbits
    # NB: Ascending if pass_num is odd, descending if pass_num is pair.
    # self.ascending = ((self.pass_num%2 - 1) == 0)

    # 2.2 - Compute file prefix regarding cycle and pass conditions
    cond_prefix = my_var.LAKE_TILE_PREFIX
    # Add cycle number condition
    cond_prefix += "%03d" % self.cycle_num
    # Add orbit number condition
    cond_prefix += "_%03d" % self.pass_num
    my_api.printInfo("[lakeSPProcessing] LakeTile files with cycle=%03d and orbit=%03d (%s)" % (self.cycle_num, self.pass_num, self.asc_dict[self.ascending]))

    # 2.3 - List all files in LakeTile directory
    lake_tile_list = os.listdir(self.lake_tile_dir)

    # 2.4 - For each listed file, if it's a metadata file related to LakeTile_shp, get related _edge and _pixcvec files if they exist
    # Init
    tile_ref_list_R = {}  # List of tile reference for right swath, organized by continent
    tile_ref_list_L = {}  # List of tile reference for left swath, organized by continent

    def init_continent_lists(continent):
        # Init the per-continent path/latitude lists for both swaths
        # (factored out: previously duplicated in the first-tile and new-continent branches)
        self.lake_tile_shp_file_path_list[continent] = []
        self.lake_tile_pixcvec_path_list_R[continent] = []
        self.lake_tile_edge_path_list_R[continent] = []
        tile_ref_list_R[continent] = []
        self.lake_tile_pixcvec_path_list_L[continent] = []
        self.lake_tile_edge_path_list_L[continent] = []
        tile_ref_list_L[continent] = []

    # Loop on LakeTile_pixcvec files
    flag_first = True  # Flag if it's the first tile to deal with
    for curFile in lake_tile_list:

        # Test if it's a wanted LakeTile_shp file (NB: .shp.xml used instead of .shp because _edge and _pixcvec may also have associated .shp file)
        if curFile.startswith(cond_prefix) and curFile.endswith(my_var.LAKE_TILE_SHP_META_SUFFIX):

            # Init add_tuple flag to True; set to False if 1 file of LakeTile product is missing
            add_tuple = True

            # Shapefile
            cur_shp = curFile.replace(my_var.LAKE_TILE_SHP_META_SUFFIX, my_var.LAKE_TILE_SHP_SUFFIX)
            if not os.path.exists(os.path.join(self.lake_tile_dir, cur_shp)):  # Test if associated _shp file exists
                add_tuple = False

            # PIXCVec file
            cur_pixcvec = curFile.replace(my_var.LAKE_TILE_SHP_META_SUFFIX, my_var.LAKE_TILE_PIXCVEC_SUFFIX)
            # BUG FIX: was re-testing cur_shp, so a missing _pixcvec file went undetected
            if not os.path.exists(os.path.join(self.lake_tile_dir, cur_pixcvec)):  # Test if associated _pixcvec file exists
                add_tuple = False

            # Edge file
            cur_edge = curFile.replace(my_var.LAKE_TILE_SHP_META_SUFFIX, my_var.LAKE_TILE_EDGE_SUFFIX)
            if not os.path.exists(os.path.join(self.lake_tile_dir, cur_edge)):  # Test if associated _edge file exists
                add_tuple = False

            # Add tuple if exists
            if add_tuple:

                self.nb_input_tiles += 1

                # Get metadata
                metadata = ET.parse(os.path.join(self.lake_tile_dir, curFile))
                try:
                    cur_continent = metadata.xpath("//LakeTile_shp/tile_info/continent")[0].text
                except Exception:
                    # No continent info in the metadata -> fall back to a single worldwide bucket
                    cur_continent = "WORLD"

                if flag_first:
                    flag_first = False
                    # Init list of continents for the pass
                    self.list_continent = [cur_continent]
                    # Init lists for continent
                    init_continent_lists(cur_continent)
                    # Overwrite metadata if 1st file processed
                    print()
                    print("WORKING VARIABLES retrieved from LakeTile processing")
                    my_var.overwriteConfig_from_xml(metadata)
                    print()
                else:
                    # Test if new continent
                    if cur_continent not in self.list_continent:
                        # Add new continent to the list
                        self.list_continent.append(cur_continent)
                        # Init lists for new continent
                        init_continent_lists(cur_continent)
                    # Metadata should be the same as the others
                    my_var.compareConfig_to_xml(metadata)

                # Add LakeTile_shp to list
                self.lake_tile_shp_file_path_list[cur_continent].append(os.path.join(self.lake_tile_dir, cur_shp))

                # Get latitude from filename
                # TODO: change when tile numbering is fixed
                TMP_infos = my_names.getInfoFromFilename(curFile, "LakeTile")
                TMP_tile = TMP_infos["tile_ref"].split("-")[0]
                TMP_lat = int(TMP_tile[:-1])
                if TMP_tile.endswith("S"):
                    TMP_lat = -TMP_lat

                # In Right swath list
                if "-R" in curFile:
                    self.lake_tile_pixcvec_path_list_R[cur_continent].append(os.path.join(self.lake_tile_dir, cur_pixcvec))
                    self.lake_tile_edge_path_list_R[cur_continent].append(os.path.join(self.lake_tile_dir, cur_edge))
                    tile_ref_list_R[cur_continent].append(TMP_lat)
                # In Left swath list
                elif "-L" in curFile:
                    self.lake_tile_pixcvec_path_list_L[cur_continent].append(os.path.join(self.lake_tile_dir, cur_pixcvec))
                    self.lake_tile_edge_path_list_L[cur_continent].append(os.path.join(self.lake_tile_dir, cur_edge))
                    tile_ref_list_L[cur_continent].append(TMP_lat)

    # 2.5 - Test list of continents
    if ("WORLD" in self.list_continent) and (len(self.list_continent) > 1):
        my_api.exitWithError("[lakeSPProcessing] Mix of continent and no continent split; look at tiles process")

    # 2.6 - Sort files from south to north, continent per continent
    for curContinent in self.list_continent:
        sorted_idx_R = np.argsort(tile_ref_list_R[curContinent])
        self.lake_tile_pixcvec_path_list_R[curContinent] = [self.lake_tile_pixcvec_path_list_R[curContinent][ind] for ind in sorted_idx_R]
        self.lake_tile_edge_path_list_R[curContinent] = [self.lake_tile_edge_path_list_R[curContinent][ind] for ind in sorted_idx_R]
        sorted_idx_L = np.argsort(tile_ref_list_L[curContinent])
        self.lake_tile_pixcvec_path_list_L[curContinent] = [self.lake_tile_pixcvec_path_list_L[curContinent][ind] for ind in sorted_idx_L]
        self.lake_tile_edge_path_list_L[curContinent] = [self.lake_tile_edge_path_list_L[curContinent][ind] for ind in sorted_idx_L]

    # 2.7 - Print list of files, per continent
    for curContinent in self.list_continent:
        my_api.printInfo("[lakeSPProcessing] > Continent %s --> %d tile(s) to deal with" % (curContinent, len(self.lake_tile_shp_file_path_list[curContinent])))
        for curFile in self.lake_tile_shp_file_path_list[curContinent]:
            my_api.printInfo("[lakeSPProcessing] %s" % os.path.basename(curFile))
        my_api.printInfo("")
    my_api.printInfo("[lakeSPProcessing] --> %d tile(s) to deal with, over %d continent(s)" % (self.nb_input_tiles, len(self.list_continent)))
    my_api.printInfo("")

    # 3 - Retrieve lake Db layer
    # BUG FIX: messages below were tagged [lakeTileProcessing] in this lakeSP method
    my_api.printInfo("[lakeSPProcessing] > 3 - Retrieving lake database layer...")
    if my_var.LAKE_DB == "":
        my_api.printInfo("[lakeSPProcessing] NO database specified -> NO link of SWOT obs with a priori lake")
    else:
        if os.path.exists(my_var.LAKE_DB):
            type_db = my_var.LAKE_DB.split('.')[-1]  # Type of database
            if type_db == "shp":  # Shapefile format
                self.objLakeDb = lake_db.LakeDb_shp(my_var.LAKE_DB)
            elif type_db == "sqlite":  # SQLite format
                self.objLakeDb = lake_db.LakeDb_sqlite(my_var.LAKE_DB)
            else:
                my_api.exitWithError("[lakeSPProcessing] Lake a priori database format (%s) is unknown: must be .shp or .sqlite" % type_db)
        else:
            my_api.exitWithError("[lakeSPProcessing] ERROR = %s doesn't exist" % my_var.LAKE_DB)
    my_api.printInfo("")
shp_option = my_params["flag_prod_shp"] else: print("[ERROR]") print("Run by pge_lake_sp.py param_file.cfg [-l] [-v VERBOSE]") print("OR pge_lake_sp.py lake_tile_dir output_dir cycle_num pass_num [-shp] [-l] [-v VERBOSE]") sys.exit("indir_or_param_file is %s, not .cfg" % file_extent) # 1.3 - Test input params have been filled # 1.3.1 - LakeTile directory if laketile_dir is None: my_api.exitWithError("LakeTile directory is missing in %s" % location) # 1.3.2 - Output directory if output_dir is None: my_api.exitWithError("Output directory is missing in %s" % location) my_tools.testDir(output_dir) # Test existence of output directory here and not in pre-proc because used in 1.5 # 1.3.3 - Cycle number if cycle_num is None: my_api.exitWithError("Cycle number is missing in %s" % location) # 1.3.4 - Pass number if pass_num is None: my_api.exitWithError("Pass number is missing in %s" % location) # 1.4 - Init environment for verbose level verbose_level = my_api.setVerbose(args.verbose) print("> Verbose level = %s" % verbose_level) # 1.5 - Init environment for log if args.logfile: logFile = os.path.join(output_dir, "pge_lake_sp_" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + ".log") my_api.initLogger(logFile, verbose_level)
def run_preprocessing(self):
    """
    Retrieve the list of input files, i.e. L2_HR_PIXC tile file main and associated L2_HR_PIXCVec
    """
    # Banner
    print("")
    print("")
    print("[multiLakeTileProcessing] PRE-PROCESSING...")
    print("")

    # 1 - Check that every working directory exists
    print("[multiLakeTileProcessing] > 1 - Testing existence of working directories...")
    # 1.1 - PIXC directory
    print("[multiLakeTileProcessing] INPUT DIR for PIXC files = %s" % self.pixc_dir)
    my_tools.testDir(self.pixc_dir)
    # 1.3 - PIXCVecRiver directory
    print("[multiLakeTileProcessing] INPUT DIR for PIXCVecRiver files = %s" % self.pixc_vec_river_dir)
    my_tools.testDir(self.pixc_vec_river_dir)
    # 1.4 - Output directory
    print("[multiLakeTileProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.testDir(self.output_dir)
    print("")

    # 2 - Collect the input files
    print("[multiLakeTileProcessing] > 2 - Retrieving input files...")

    # 2.1 - Build the PixC filename prefix from the cycle / pass / tile filters
    cond_prefix = my_var.PIXC_PREFIX
    if (self.cycle_num is None) or (self.cycle_num == "-1"):
        # No cycle filter: keep every PixC file of the input directory
        print("[multiLakeTileProcessing] All PixC files in the input directory")
    else:
        # Restrict to the requested cycle
        cond_prefix += "%03d" % self.cycle_num
        if (self.pass_num is None) or (self.pass_num == "-1"):
            print("[multiLakeTileProcessing] PixC files with cycle=%03d" % self.cycle_num)
        else:
            # Restrict to the requested pass as well
            cond_prefix += "_%03d" % self.pass_num
            if self.tile_ref is not None:
                # Restrict to a single tile
                print("[multiLakeTileProcessing] PixC files with cycle=%03d , pass=%03d , tile=%s" % (self.cycle_num, self.pass_num, self.tile_ref))
                cond_prefix += "_%s" % self.tile_ref
            else:
                print("[multiLakeTileProcessing] PixC files with cycle=%03d and pass=%03d" % (self.cycle_num, self.pass_num))

    # 2.2 / 2.3 - Walk the PIXC directory and keep each PixC file whose
    # PIXCVecRiver counterpart exists in the PIXCVecRiver directory
    for pixc_name in os.listdir(self.pixc_dir):
        if not pixc_name.startswith(cond_prefix):
            continue  # Not a wanted PIXC file
        # Name of the associated PIXCVecRiver file
        pixcvec_name = pixc_name.replace(my_var.PIXC_PREFIX, my_var.PIXCVEC_RIVER_PREFIX)
        if os.path.exists(os.path.join(self.pixc_vec_river_dir, pixcvec_name)):
            # Both files of the pair exist: register them
            self.list_pixc.append(pixc_name)
            self.list_pixc_vec_river.append(pixcvec_name)
            self.nb_input += 1

    print("[multiLakeTileProcessing] --> %d tile(s) to deal with" % self.nb_input)
    print("")
def run_preprocessing(self):
    """
    Process PGE_LakeTile IN, i.e. test input paths, retrieve orbit infos, open lake database and init objects

    Steps: (1) check input files/dirs, (2) derive tile info and output filenames from
    the PIXC filename, (3) build the PIXCVec and PixelCloud objects, (4) open the
    a priori lake database layer (may raise ProcessingError), (5) init the lake product.
    """
    logger = logging.getLogger(self.__class__.__name__)
    logger.info("")
    logger.info("")
    logger.info("**************************")
    logger.info("***** PRE-PROCESSING *****")
    logger.info("**************************")
    logger.info("")

    # 1 - Test existence and file format of input paths
    logger.info("> 1 - Testing existence of input paths...")
    # 1.1 - PIXC file (must be a NetCDF file)
    message = " INPUT PIXC file = %s" % self.pixc_file
    logger.info(message)
    my_tools.testFile(self.pixc_file, IN_extent=".nc")
    # 1.2 - PIXCVecRiver file (must be a NetCDF file)
    message = " INPUT PIXCVecRiver file = %s" % self.pixc_vec_river_file
    logger.info(message)
    my_tools.testFile(self.pixc_vec_river_file, IN_extent=".nc")
    # 1.3 - Output directory
    message = " OUTPUT DIR = %s" % self.output_dir
    logger.info(message)
    my_tools.testDir(self.output_dir)
    logger.info("")

    # 2 - Retrieve orbit info from PIXC filename and compute output filenames
    logger.info("> 2 - Retrieving tile infos from PIXC filename...")
    self.lake_tile_filenames = my_names.lakeTileFilenames(
        self.pixc_file, self.pixc_vec_river_file, self.output_dir)
    logger.info("")

    # 3 - Objects initialisation
    logger.info("> 3 - Init and format intput objects...")
    logger.info("")

    # 3.1 - Init PIXCVec product by retrieving data from the pixel cloud complementary file after river processing
    logger.info("> 3a - Init pixel cloud complementary file...")
    self.objPixcVec = proc_pixc_vec.PixelCloudVec("TILE")
    self.objPixcVec.set_from_pixcvec_file(self.pixc_vec_river_file)
    logger.info("")

    # 3.2 - Retrieve needed data from the pixel cloud
    # NOTE(review): reject_index presumably flags pixels already assigned to rivers — confirm in proc_pixc_vec
    logger.info("> 3b - Retrieving needed data from the pixel cloud...")
    self.objPixc = proc_pixc.PixelCloud()
    self.objPixc.set_from_pixc_file(self.pixc_file, self.objPixcVec.reject_index)
    logger.info("")

    # 3.3 - Reshape PIXCVec arrays (must run after the PixelCloud is loaded)
    logger.info("> 3c - Reshape PIXCVecRiver arrays...")
    self.objPixcVec.reshape(self.objPixc)
    logger.info("")

    # 4 - Retrieve lake Db layer; without a database, SWOT obs are not linked to a priori lakes
    logger.info("> 4 - Retrieving lake database layer...")
    if my_var.LAKE_DB == "":
        logger.info(
            "NO database specified -> NO link of SWOT obs with a priori lake"
        )
    else:
        if os.path.exists(my_var.LAKE_DB):
            type_db = my_var.LAKE_DB.split('.')[-1]  # Type of database, from file extension
            if type_db == "shp":  # Shapefile format
                self.objLakeDb = lake_db.LakeDb_shp(
                    my_var.LAKE_DB, self.objPixc.tile_poly)
            elif type_db == "sqlite":  # SQLite format
                self.objLakeDb = lake_db.LakeDb_sqlite(
                    my_var.LAKE_DB, self.objPixc.tile_poly)
            else:
                # Unsupported database extension -> abort processing
                message = "Lake a priori database format (%s) is unknown: must be .shp or .sqlite" % type_db
                raise service_error.ProcessingError(message, logger)
        else:
            # Database path configured but not present on disk -> abort processing
            message = " ERROR = %s doesn't exist" % my_var.LAKE_DB
            raise service_error.ProcessingError(message, logger)
    logger.info("")

    # 5 - Initialize lake product; its name is the LakeTile_shp basename without extension
    logger.info("> 5 - Init lake product object...")
    self.objLake = proc_lake.LakeProduct(
        "TILE",
        self.objPixc,
        self.objPixcVec,
        self.objLakeDb,
        os.path.basename(
            self.lake_tile_filenames.lake_tile_shp_file).split(".")[0],
        in_id_prefix=self.lake_tile_filenames.lake_id_prefix)
    logger.info("")
def run_preprocessing(self):
    """
    Retrieve the list of input files, i.e. L2_HR_LakeTile products for wanted passes = shapefile + PIXC_edge + PIXCVec files
    """
    my_api.printInfo("")
    my_api.printInfo("")
    my_api.printInfo("[multiLakeSPProcessing] PRE-PROCESSING...")
    my_api.printInfo("")

    # 1 - Check that the working directories exist
    my_api.printInfo("[multiLakeSPProcessing] > 1 - Testing existence of working directories...")
    # 1.1 - LakeTile directory
    my_api.printInfo("[multiLakeSPProcessing] INPUT LakeTile DIR = %s" % self.lake_tile_dir)
    my_tools.testDir(self.lake_tile_dir)
    # 1.2 - Output directory
    my_api.printInfo("[multiLakeSPProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.testDir(self.output_dir)
    my_api.printInfo("")

    # 2 - Collect the input files
    my_api.printInfo("[multiLakeSPProcessing] > 2 - Retrieving input files...")

    # 2.1 - Build the LakeTile filename prefix from the cycle / pass filters
    cond_prefix = my_var.LAKE_TILE_PREFIX
    if (self.cycle_num is None) or (self.cycle_num == "-1"):
        # No cycle filter: keep every LakeTile product of the directory
        my_api.printInfo("[multiLakeSPProcessing] All LakeTile files in the input directory")
    else:
        # Restrict to the requested cycle
        cond_prefix += "%03d" % self.cycle_num
        if (self.pass_num is None) or (self.pass_num == "-1"):
            my_api.printInfo("[multiLakeSPProcessing] LakeTile files with cycle=%03d" % self.cycle_num)
        else:
            # Restrict to the requested pass as well
            cond_prefix += "_%03d" % self.pass_num
            my_api.printInfo("[multiLakeSPProcessing] LakeTile files with cycle=%03d and pass=%03d" % (self.cycle_num, self.pass_num))

    # 2.2 / 2.3 - Scan the LakeTile directory and record each distinct (cycle, pass) pair
    for shp_meta_name in os.listdir(self.lake_tile_dir):
        # Keep only wanted LakeTile_shp metadata files
        if not (shp_meta_name.startswith(cond_prefix) and shp_meta_name.endswith(my_var.LAKE_TILE_SHP_META_SUFFIX)):
            continue
        infos = my_names.getInfoFromFilename(shp_meta_name, "LakeTile")
        cur_cycle = int(infos["cycle"])
        cur_pass = int(infos["pass"])
        # self.list_cycle and self.list_pass are parallel lists of pairs:
        # append only if this (cycle, pass) pair has not been seen yet
        if (cur_cycle, cur_pass) not in zip(self.list_cycle, self.list_pass):
            self.list_cycle.append(cur_cycle)
            self.list_pass.append(cur_pass)

    my_api.printInfo("[multiLakeSPProcessing] --> %d (cycle, pass) pair(s) to deal with" % len(self.list_cycle))
    my_api.printInfo("")
def _check_config_parameters(self):
    """
    Check parameters coherence for LakeTile parameter file

    Verifies presence/type of every expected key, logs each retrieved value,
    and tests the existence of input files/directories.

    :return: True if OK
    :raise service_error.ConfigFileError: if a configuration key is missing or invalid
    """
    logger = logging.getLogger(self.__class__.__name__)

    def check_and_log(section, key, val_type, valeurs=None):
        # Check existence/type (and optionally allowed values) of a config key,
        # then log "KEY = value" exactly as the per-key statements used to
        if valeurs is None:
            self.cfg.test_var_config_file(section, key, val_type)
        else:
            self.cfg.test_var_config_file(section, key, val_type, valeurs=valeurs)
        logger.debug('%s = %s', key, str(self.cfg.get(section, key)))

    try:
        # 1 - Config parameters from command file

        # 1.1 - PATH section
        # PIXC file
        self.cfg.test_var_config_file('PATHS', 'PIXC file', str)
        my_tools.testFile(self.cfg.get('PATHS', 'PIXC file'))
        logger.debug('PIXC file = ' + str(self.cfg.get('PATHS', 'PIXC file')))
        # PIXCVecRiver file
        self.cfg.test_var_config_file('PATHS', 'PIXCVecRiver file', str)
        my_tools.testFile(self.cfg.get('PATHS', 'PIXCVecRiver file'))
        logger.debug('PIXCvecRiver file = ' + str(self.cfg.get('PATHS', 'PIXCVecRiver file')))
        # Output directory
        self.cfg.test_var_config_file('PATHS', 'Output directory', str)
        my_tools.testDir(self.cfg.get('PATHS', 'Output directory'))
        logger.debug('Output directory = ' + str(self.cfg.get('PATHS', 'Output directory')))

        # 1.2 - DATABASES section
        # Lake database full path (file must exist)
        self.cfg.test_var_config_file('DATABASES', 'LAKE_DB', str)
        my_tools.testFile(self.cfg.get('DATABASES', 'LAKE_DB'))
        logger.debug('LAKE_DB = ' + str(self.cfg.get('DATABASES', 'LAKE_DB')))
        # Lake identifier attribute name in the database
        check_and_log('DATABASES', 'LAKE_DB_ID', str)
        # Continent file if want LakeSP product split per continent (optional)
        if self.cfg.get('DATABASES', 'CONTINENT_FILE') is None:
            logger.debug('CONTINENT_FILE not filled => LakeTile product not linked to a continent')
        else:
            self.cfg.test_var_config_file('DATABASES', 'CONTINENT_FILE', str)
            my_tools.testFile(self.cfg.get('DATABASES', 'CONTINENT_FILE'))
            logger.debug('CONTINENT_FILE = ' + str(self.cfg.get('DATABASES', 'CONTINENT_FILE')))

        # 1.3 - OPTIONS section
        # Shapefile production
        check_and_log('OPTIONS', 'Produce shp', bool)

        # 2 - Config parameters from parameter file

        # 2.1 - CONFIG_PARAMS section
        # Water flag = 3=water near land edge  4=interior water
        check_and_log('CONFIG_PARAMS', 'FLAG_WATER', str)
        # Dark water flag = 23=darkwater near land  24=interior dark water
        check_and_log('CONFIG_PARAMS', 'FLAG_DARK', str)
        # Min size for a lake to generate a lake product (=polygon + attributes) for it
        check_and_log('CONFIG_PARAMS', 'MIN_SIZE', float)
        # Maximal standard deviation of height inside a lake
        check_and_log('CONFIG_PARAMS', 'STD_HEIGHT_MAX', float)
        # To improve PixC geolocation (=True) or not (=False)
        check_and_log('CONFIG_PARAMS', 'IMP_GEOLOC', bool)
        # Method to compute lake boundary or polygon hull
        # 0=convex hull 1=concave hull (1.0=with alpha param (default) 1.1=without) 2=concave hull radar vectorisation
        check_and_log('CONFIG_PARAMS', 'HULL_METHOD', float, valeurs=[0, 1, 1.1, 2])
        # Big lakes parameters for improved geoloc
        check_and_log('CONFIG_PARAMS', 'BIGLAKE_MODEL', str, valeurs=["polynomial", "no"])
        check_and_log('CONFIG_PARAMS', 'BIGLAKE_MIN_SIZE', float)
        check_and_log('CONFIG_PARAMS', 'BIGLAKE_GRID_SPACING', float)
        check_and_log('CONFIG_PARAMS', 'BIGLAKE_GRID_RES', float)

        # 2.2 - ID section
        # Nb digits for counter of lakes in a tile or pass
        check_and_log('ID', 'NB_DIGITS', str)

        # 2.3 - FILENAMES_PATTERN section
        # Product generator
        check_and_log('FILENAMES_PATTERN', 'PRODUCER', str)
        # Composite Release IDentifier for LakeTile processing
        check_and_log('FILENAMES_PATTERN', 'LAKE_TILE_CRID', str)
        # Composite Release IDentifier for LakeSP processing
        check_and_log('FILENAMES_PATTERN', 'LAKE_SP_CRID', str)
        # PIXC product
        for key in ('PIXC_PREFIX', 'PIXC_PATTERN_PRINT', 'PIXC_PATTERN_IND'):
            check_and_log('FILENAMES_PATTERN', key, str)
        # PIXCVecRiver product
        for key in ('PIXCVEC_RIVER_PREFIX', 'PIXCVEC_RIVER_PATTERN_PRINT', 'PIXCVEC_RIVER_PATTERN_IND'):
            check_and_log('FILENAMES_PATTERN', key, str)
        # LakeTile product
        for key in ('LAKE_TILE_PREFIX', 'LAKE_TILE_PATTERN', 'LAKE_TILE_PATTERN_PRINT', 'LAKE_TILE_PATTERN_IND',
                    'LAKE_TILE_SHP_SUFFIX', 'LAKE_TILE_SHP_META_SUFFIX', 'LAKE_TILE_EDGE_SUFFIX',
                    'LAKE_TILE_PIXCVEC_SUFFIX'):
            check_and_log('FILENAMES_PATTERN', key, str)

    # Error managed
    except service_error.ConfigFileError:
        # BUG FIX: original passed the path as a stray positional arg with no %s
        # placeholder, so the message failed to format and the path was never logged
        logger.error("Error in the configuration file %s", self.cfg.path_conf)
        raise
    # Warning error not managed !
    except Exception:
        logger.error("Something wrong happened during configuration file check!")
        raise

    return True