def getEdgeLabels(self, IN_tile_idx, IN_edge_loc_str):
    """
    This function returns the LakeTile_edge labels of pixels within the buffer zone

    :param IN_tile_idx: indices of pixels at the edge of tile
    :type IN_tile_idx: 1D array of int
    :param IN_edge_loc_str: edge location = "top" or "bottom"
    :type IN_edge_loc_str: string

    :return: LakeTile_edge labels of pixels within the buffer zone
    :rtype: 1D-array of int
    """
    # Azimuth indices restricted to the pixels of this tile edge
    tile_azimuth = self.azimuth_idx[IN_tile_idx]

    in_buffer = None

    # Associated location
    if IN_edge_loc_str == "bottom":
        # Bottom of tile: keep pixels whose azimuth index is zero
        in_buffer = np.where(tile_azimuth == 0)[0]
    elif IN_edge_loc_str == "top":
        # Top of tile: keep pixels whose azimuth index equals the maximum
        in_buffer = np.where(tile_azimuth == max(tile_azimuth))[0]
    else:
        my_api.exitWithError("IN_edge_loc_str input variable has to be 'top' or 'bottom'")

    return self.edge_label[IN_tile_idx[in_buffer]]
def getEdgePixels(self, IN_tile_idx, IN_edge_loc_str):
    """
    The function returns range and azimuth of pixels located within a buffer
    around the tile edge specified in edge_loc_str.

    :param IN_tile_idx: indices of pixels edge of tile
    :type IN_tile_idx: 1D array of int
    :param IN_edge_loc_str: edge location = "top" or "bottom"
    :type IN_edge_loc_str: string

    :return: range and azimuth indices of pixels
    :rtype: 1D array of int
    """
    # Azimuth indices restricted to the pixels of this tile edge
    tile_azimuth = self.azimuth_idx[IN_tile_idx]

    in_buffer = None

    # Associated location
    if IN_edge_loc_str == "bottom":
        # Bottom of tile: keep pixels whose azimuth index is zero
        in_buffer = np.where(tile_azimuth == 0)[0]
    elif IN_edge_loc_str == "top":
        # Top of tile: keep pixels whose azimuth index equals the maximum
        in_buffer = np.where(tile_azimuth == max(tile_azimuth))[0]
    else:
        my_api.exitWithError("IN_edge_loc_str input variable has to be 'top' or 'bottom'")

    return self.range_idx[IN_tile_idx][in_buffer], self.azimuth_idx[IN_tile_idx][in_buffer]
def run_preprocessing(self):
    """
    Retrieve the list of input files, i.e. L2_HR_LakeTile products for wanted pass = shapefile + PIXC_edge + PIXCVec files

    Side effects: fills the per-continent lists of LakeTile _shp/_edge/_pixcvec
    paths, self.list_continent, self.nb_input_tiles, self.ascending and
    self.objLakeDb.
    """
    my_api.printInfo("")
    my_api.printInfo("")
    my_api.printInfo("[lakeSPProcessing] PRE-PROCESSING...")
    my_api.printInfo("")

    # 1 - Test existence of directories
    my_api.printInfo("[lakeSPProcessing] > 1 - Testing existence of working directories ...")
    # 1.1 - LakeTile directory
    my_api.printInfo("[lakeSPProcessing] INPUT LakeTile DIR = %s" % self.lake_tile_dir)
    my_tools.testDir(self.lake_tile_dir)
    # 1.2 - Output directory
    my_api.printInfo("[lakeSPProcessing] OUTPUT DIR = %s" % self.output_dir)
    my_tools.testDir(self.output_dir)
    my_api.printInfo("")

    # 2 - Get list of input files
    my_api.printInfo("[lakeSPProcessing] > 2 - Retrieving input files ...")

    # 2.1 - Get ascending or descending orientation
    self.ascending = (self.pass_num % 2 == 0)  # TODO: replace when new orbits
    # NB: Ascending if pass_num is odd, descending if pass_num is pair.
    # self.ascending = ((self.pass_num%2 - 1) == 0)

    # 2.2 - Compute file prefix regarding cycle and pass conditions
    cond_prefix = my_var.LAKE_TILE_PREFIX
    # Add cycle number condition
    cond_prefix += "%03d" % self.cycle_num
    # Add orbit number condition
    cond_prefix += "_%03d" % self.pass_num
    my_api.printInfo("[lakeSPProcessing] LakeTile files with cycle=%03d and orbit=%03d (%s)" % (self.cycle_num, self.pass_num, self.asc_dict[self.ascending]))

    # 2.3 - List all files in LakeTile directory
    lake_tile_list = os.listdir(self.lake_tile_dir)

    # 2.4 - For each listed file, if it's a metadata file related to LakeTile_shp,
    #       get related _edge and _pixcvec files if they exist
    tile_ref_list_R = {}  # List of tile reference for right swath, organized by continent
    tile_ref_list_L = {}  # List of tile reference for left swath, organized by continent
    flag_first = True  # Flag if it's the first tile to deal with
    for curFile in lake_tile_list:

        # Test if it's a wanted LakeTile_shp file
        # (NB: .shp.xml used instead of .shp because _edge and _pixcvec may also have associated .shp file)
        if not (curFile.startswith(cond_prefix) and curFile.endswith(my_var.LAKE_TILE_SHP_META_SUFFIX)):
            continue

        # Init add_tuple flag to True; set to False if 1 file of LakeTile product is missing
        add_tuple = True

        # Shapefile
        cur_shp = curFile.replace(my_var.LAKE_TILE_SHP_META_SUFFIX, my_var.LAKE_TILE_SHP_SUFFIX)
        if not os.path.exists(os.path.join(self.lake_tile_dir, cur_shp)):  # Test if associated _shp file exists
            add_tuple = False

        # PIXCVec file
        cur_pixcvec = curFile.replace(my_var.LAKE_TILE_SHP_META_SUFFIX, my_var.LAKE_TILE_PIXCVEC_SUFFIX)
        # BUGFIX: the original tested cur_shp here (copy-paste), so a missing
        # _pixcvec file was never detected; test the _pixcvec file itself
        if not os.path.exists(os.path.join(self.lake_tile_dir, cur_pixcvec)):  # Test if associated _pixcvec file exists
            add_tuple = False

        # Edge file
        cur_edge = curFile.replace(my_var.LAKE_TILE_SHP_META_SUFFIX, my_var.LAKE_TILE_EDGE_SUFFIX)
        if not os.path.exists(os.path.join(self.lake_tile_dir, cur_edge)):  # Test if associated _edge file exists
            add_tuple = False

        # Add tuple only if the 3 product files exist
        if not add_tuple:
            continue

        self.nb_input_tiles += 1

        # Get metadata
        metadata = ET.parse(os.path.join(self.lake_tile_dir, curFile))
        try:
            cur_continent = metadata.xpath("//LakeTile_shp/tile_info/continent")[0].text
        except Exception:
            # No continent information in the metadata -> worldwide processing
            cur_continent = "WORLD"

        if flag_first:
            flag_first = False
            # Init list of continents for the pass
            self.list_continent = [cur_continent]
            # Init lists for continent
            self._init_continent_lists(cur_continent, tile_ref_list_R, tile_ref_list_L)
            # Overwrite metadata if 1st file processed
            print()
            print("WORKING VARIABLES retrieved from LakeTile processing")
            my_var.overwriteConfig_from_xml(metadata)
            print()
        else:
            # Test if new continent
            if cur_continent not in self.list_continent:
                # Add new continent to the list
                self.list_continent.append(cur_continent)
                # Init lists for new continent
                self._init_continent_lists(cur_continent, tile_ref_list_R, tile_ref_list_L)
            # Metadata should be the same as the others
            my_var.compareConfig_to_xml(metadata)

        # Add LakeTile_shp to list
        self.lake_tile_shp_file_path_list[cur_continent].append(os.path.join(self.lake_tile_dir, cur_shp))

        # Get latitude from filename
        # TODO: change when tile numbering is fixed
        TMP_infos = my_names.getInfoFromFilename(curFile, "LakeTile")
        TMP_tile = TMP_infos["tile_ref"].split("-")[0]
        TMP_lat = int(TMP_tile[:-1])
        if TMP_tile.endswith("S"):
            TMP_lat = -TMP_lat

        # In Right swath list
        if "-R" in curFile:
            self.lake_tile_pixcvec_path_list_R[cur_continent].append(os.path.join(self.lake_tile_dir, cur_pixcvec))
            self.lake_tile_edge_path_list_R[cur_continent].append(os.path.join(self.lake_tile_dir, cur_edge))
            tile_ref_list_R[cur_continent].append(TMP_lat)
        # In Left swath list
        elif "-L" in curFile:
            self.lake_tile_pixcvec_path_list_L[cur_continent].append(os.path.join(self.lake_tile_dir, cur_pixcvec))
            self.lake_tile_edge_path_list_L[cur_continent].append(os.path.join(self.lake_tile_dir, cur_edge))
            tile_ref_list_L[cur_continent].append(TMP_lat)

    # 2.5 - Test list of continents
    if ("WORLD" in self.list_continent) and (len(self.list_continent) > 1):
        my_api.exitWithError("[lakeSPProcessing] Mix of continent and no continent split; look at tiles process")

    # 2.6 - Sort files from south to north, continent per continent
    for curContinent in self.list_continent:
        sorted_idx_R = np.argsort(tile_ref_list_R[curContinent])
        self.lake_tile_pixcvec_path_list_R[curContinent] = [self.lake_tile_pixcvec_path_list_R[curContinent][ind] for ind in sorted_idx_R]
        self.lake_tile_edge_path_list_R[curContinent] = [self.lake_tile_edge_path_list_R[curContinent][ind] for ind in sorted_idx_R]
        sorted_idx_L = np.argsort(tile_ref_list_L[curContinent])
        self.lake_tile_pixcvec_path_list_L[curContinent] = [self.lake_tile_pixcvec_path_list_L[curContinent][ind] for ind in sorted_idx_L]
        self.lake_tile_edge_path_list_L[curContinent] = [self.lake_tile_edge_path_list_L[curContinent][ind] for ind in sorted_idx_L]

    # 2.7 - Print list of files, per continent
    for curContinent in self.list_continent:
        my_api.printInfo("[lakeSPProcessing] > Continent %s --> %d tile(s) to deal with" % (curContinent, len(self.lake_tile_shp_file_path_list[curContinent])))
        for curFile in self.lake_tile_shp_file_path_list[curContinent]:
            my_api.printInfo("[lakeSPProcessing] %s" % os.path.basename(curFile))
        my_api.printInfo("")
    my_api.printInfo("[lakeSPProcessing] --> %d tile(s) to deal with, over %d continent(s)" % (self.nb_input_tiles, len(self.list_continent)))
    my_api.printInfo("")

    # 3 - Retrieve lake Db layer
    # NOTE(review): log tags below say "lakeTileProcessing" — looks like a
    # copy-paste from the LakeTile chain; confirm before changing visible logs
    my_api.printInfo("[lakeTileProcessing] > 3 - Retrieving lake database layer...")
    if my_var.LAKE_DB == "":
        my_api.printInfo("[lakeTileProcessing] NO database specified -> NO link of SWOT obs with a priori lake")
    else:
        if os.path.exists(my_var.LAKE_DB):
            type_db = my_var.LAKE_DB.split('.')[-1]  # Type of database
            if type_db == "shp":  # Shapefile format
                self.objLakeDb = lake_db.LakeDb_shp(my_var.LAKE_DB)
            elif type_db == "sqlite":  # SQLite format
                self.objLakeDb = lake_db.LakeDb_sqlite(my_var.LAKE_DB)
            else:
                my_api.exitWithError("[lakeTileProcessing] Lake a priori database format (%s) is unknown: must be .shp or .sqlite" % type_db)
        else:
            my_api.exitWithError("[lakeTileProcessing] ERROR = %s doesn't exist" % my_var.LAKE_DB)
    my_api.printInfo("")

def _init_continent_lists(self, cur_continent, tile_ref_list_R, tile_ref_list_L):
    """
    Init the per-continent path and tile-reference lists for both swaths.
    (Extracted from run_preprocessing where this block was duplicated verbatim.)

    :param cur_continent: continent key to initialize
    :type cur_continent: string
    :param tile_ref_list_R: tile latitudes for right swath, per continent (modified in place)
    :type tile_ref_list_R: dict
    :param tile_ref_list_L: tile latitudes for left swath, per continent (modified in place)
    :type tile_ref_list_L: dict
    """
    self.lake_tile_shp_file_path_list[cur_continent] = []
    self.lake_tile_pixcvec_path_list_R[cur_continent] = []
    self.lake_tile_edge_path_list_R[cur_continent] = []
    tile_ref_list_R[cur_continent] = []
    self.lake_tile_pixcvec_path_list_L[cur_continent] = []
    self.lake_tile_edge_path_list_L[cur_continent] = []
    tile_ref_list_L[cur_continent] = []
laketile_dir = my_params["laketile_dir"] output_dir = my_params["output_dir"] cycle_num = my_params["cycle_num"] pass_num = my_params["pass_num"] shp_option = my_params["flag_prod_shp"] else: print("[ERROR]") print("Run by pge_lake_sp.py param_file.cfg [-l] [-v VERBOSE]") print("OR pge_lake_sp.py lake_tile_dir output_dir cycle_num pass_num [-shp] [-l] [-v VERBOSE]") sys.exit("indir_or_param_file is %s, not .cfg" % file_extent) # 1.3 - Test input params have been filled # 1.3.1 - LakeTile directory if laketile_dir is None: my_api.exitWithError("LakeTile directory is missing in %s" % location) # 1.3.2 - Output directory if output_dir is None: my_api.exitWithError("Output directory is missing in %s" % location) my_tools.testDir(output_dir) # Test existence of output directory here and not in pre-proc because used in 1.5 # 1.3.3 - Cycle number if cycle_num is None: my_api.exitWithError("Cycle number is missing in %s" % location) # 1.3.4 - Pass number if pass_num is None: my_api.exitWithError("Pass number is missing in %s" % location) # 1.4 - Init environment for verbose level verbose_level = my_api.setVerbose(args.verbose) print("> Verbose level = %s" % verbose_level)
cycle_num = my_params["cycle_num"] pass_num = my_params["pass_num"] shp_option = my_params["flag_prod_shp"] else: print("[ERROR]") print("Run by multi_lake_sp.py param_file.cfg [-l] [-v VERBOSE]") print( "OR multi_lake_sp.py lake_tile_dir output_dir [cycle_num [pass_num]] [-shp] [-l] [-v VERBOSE]" ) sys.exit("indir_or_param_file is %s, not .cfg" % file_extent) # 1.3 - Test input params have been filled # 1.3.1 - LakeTile directory if laketile_dir is None: my_api.exitWithError("LakeTile directory is missing in %s" % location) # 1.3.2 - Output directory if output_dir is None: my_api.exitWithError("Output directory is missing in %s" % location) my_tools.testDir( output_dir ) # Test existence of output directory here and not in pre-proc because used in 1.5 # 1.4 - Init environment for verbose level verbose_level = my_api.setVerbose(args.verbose) print("> Verbose level = %s" % verbose_level) # 1.5 - Init environment for log if args.logfile: logFile = os.path.join( output_dir, "multi_lake_sp_" +