def initialize_product(self):
    """Create the public/DATA directory tree for the MUSCATE L2 product and
    record the global product (XML) filename on the instance."""
    LOGGER.debug("Start MuscateL2ImageFileWriter::InitializeProduct() ...")
    base_filename = self._l1_image_info.get_l2_product_id()
    LOGGER.debug("L2Base filename: " + base_filename)
    LOGGER.debug("Output Directory: " + self._l2_output_directory)
    public_dir = os.path.join(self._l2_output_directory, base_filename)
    data_dir = os.path.join(public_dir, "DATA")
    # Create the output directory tree (public dir first, then its DATA subdir)
    for directory in (public_dir, data_dir):
        file_utils.create_directory(directory)
    # Remember where the global product metadata (XML) will be written
    self._global_product_filename = os.path.join(public_dir, base_filename + "_MTD_ALL.xml")
def initialize(self, l2privateimagefilenames):
    """Copy the input L2 private images to the output private data directory.

    If backward mode, when the cloud rate is too high an output L2 product must
    still be generated: the L2 input product (D-1) is copied so the iteration
    can continue. Does nothing unless ``self._copyprivatefroml2`` is set.

    :param l2privateimagefilenames: provider of the destination private image filenames
    """
    if not self._copyprivatefroml2:
        return
    LOGGER.debug("Copy Input L2 private to output private data...")
    source_provider = self._l2_image_reader.L2PrivateImageFilenamesProvider
    # All private images share one directory: create it once from the RTA path
    file_utils.create_directory(
        os.path.dirname(l2privateimagefilenames.get_rta_image_filename()))
    # (accessor name, is the image optional?) — optional images must not raise
    # when missing; order matches the original copy sequence.
    copy_plan = (
        ("get_rta_image_filename", False),
        ("get_rtc_image_filename", False),
        ("get_rcr_image_filename", False),
        ("get_sto_image_filename", False),
        ("get_pxd_image_filename", False),
        ("get_ndt_image_filename", False),
        ("get_cld_image_filename", False),
        ("get_cla_image_filename", True),   # CLA can be optional
        ("get_wam_image_filename", False),
        ("get_ltc_image_filename", True),   # LTC writing can be optional
    )
    for accessor, optional in copy_plan:
        source = getattr(source_provider, accessor)()
        destination = getattr(l2privateimagefilenames, accessor)()
        if optional:
            otb_file_utils.otb_copy_image_to_file(source, destination, raise_exceptions=False)
        else:
            otb_file_utils.otb_copy_image_to_file(source, destination)
def preprocessing(self):
    """Prepare the EarthExplorer L2 output product tree before processing.

    Validates mandatory collaborators, initializes the L2 filenames provider,
    creates the public/private (and, in backward mode, LTC) directories,
    touches the empty .DBL file, then delegates to the superclass.

    :raises MajaException: if a mandatory collaborator is None
    """
    # ---------------------------------------------------------------------
    # Check mandatory collaborators BEFORE they are dereferenced.
    # (Bug fix: the original dereferenced self.plugin and
    # self.l1imageinformationsproviderbase in the provider initialization
    # and only then tested them for None, making the checks unreachable
    # in the failure case — an AttributeError would fire first.)
    if self.plugin is None:
        raise MajaException("Internal error: the variable m_PluginBasePointer is NULL!")
    if self.l1imageinformationsproviderbase is None:
        raise MajaException("Internal error: the variable m_L1ImageInformationsProviderBase is NULL!")
    if not self.initmode:
        if self.inputl2imagefilereader is None:
            raise MajaException("Internal error: the variable m_InputL2ImageFileReader is NULL!")
    # ---------------------------------------------------------------------
    # Store the Global filename of the product. Here, it's an EarthExplorer
    # L2 product organization
    self.l2imagefilenamesprovider = L2ImageFilenamesProvider()
    self.l2imagefilenamesprovider.initialize(self.l1imageinformationsproviderbase,
                                             self.plugin.ListOfL2Resolutions,
                                             self.outputdirectory, False)
    # Create the output directory tree
    file_utils.create_directory(self.l2imagefilenamesprovider.get_public_directory())
    file_utils.create_directory(self.l2imagefilenamesprovider.get_private_filenames().get_private_directory())
    # ---------------------------------------------------------------------
    # Write LTC Private Header — needed for Backward mode
    # (MACCS 4.7.2 - correction pour FA 1572)
    if self.writeltc and self.writel2products and self.backwardmode:
        # Create the LTC directory if it doesn't exist
        file_utils.create_directory(
            self.l2imagefilenamesprovider.get_private_filenames().get_ltc_image_dir_filename())
    # ---------------------------------------------------------------------
    # Create the (empty) .DBL file
    file_utils.touch_file(self.l2imagefilenamesprovider.get_dbl_filename())
    # ---------------------------------------------------------------------
    # Call the PreProcessing superclass
    super(EarthExplorerL2HeaderFileWriter, self).pre_processing()
def write(self, working_dir):
    """Write the EarthExplorer L2 product: directories, private/public images and quicklook.

    :param working_dir: working 'temporary' directory handed to the image writers
    """
    LOGGER.info("EarthExplorerL2ImageFileWriter:Write()")
    # Store the Global filename of the product. Here, it's an EarthExplorer
    # L2 product organization
    l_L2ImageFilenamesProvider = L2ImageFilenamesProvider()
    l_L2ImageFilenamesProvider.initialize(
        self._l1_image_info, self._plugin.ListOfL2Resolutions, self._l2_output_directory, False)
    # Create the Outputs directories
    file_utils.create_directory(l_L2ImageFilenamesProvider.get_public_directory())
    file_utils.create_directory(l_L2ImageFilenamesProvider.get_private_filenames().get_private_directory())
    # If multi temporal: create the LTC directory as well
    LOGGER.debug(
        "EarthExplorerL2ImageFileWriter:: m_WriteLTC : " + str(self._dealingltc))
    LOGGER.debug("l_L2ImageFilenamesProvider.get_private_file_names().GetDealingLTC() : " +
                 str(l_L2ImageFilenamesProvider.get_private_filenames().get_dealing_ltc()))
    if self._dealingltc:
        file_utils.create_directory(
            l_L2ImageFilenamesProvider.get_private_filenames().get_ltc_image_dir_filename())
        LOGGER.debug("l_L2ImageFilenamesProvider.get_private_file_names().GetLTCImageDirFileName() : " +
                     l_L2ImageFilenamesProvider.get_private_filenames().get_ltc_image_dir_filename())
    # Copy/initialize the private part (see self.initialize)
    self.initialize(l_L2ImageFilenamesProvider.get_private_filenames())
    # Store filenames (global header and production report)
    self._global_product_filename = l_L2ImageFilenamesProvider.get_hdr_filename()
    self._production_report_filename = l_L2ImageFilenamesProvider.get_pmc_filename()
    # Store the Privates filename
    self._l2privateimagefilenamesprovider = l_L2ImageFilenamesProvider.get_private_filenames()
    # Write product only if enabled
    if self._write_l2_products:
        # ex: - self.m_ReflectanceQuantificationValue: 0.001
        #     - self.m_AOTQuantificationValue: 0.05
        #     - self.m_VAPQuantificationValue: 0.005
        l_ReflectanceQuantificationValue = 1. / self._reflectancequantification
        l_AOTQuantificationValue = 1. / self._aotquantificationvalue
        l_VAPQuantificationValue = 1. / self._vapquantificationvalue
        # Write Private images
        self.write_private_images(
            l_L2ImageFilenamesProvider.get_private_filenames(),
            l_ReflectanceQuantificationValue,
            self._plugin.CLDDataBandsSelected,
            self._plugin.CLDCoreAlgorithmsMapBand,
            self._writeonlyclacld,
            self._dealingltc,
            working_dir)
        # Write Public images
        self.write_public_images(
            l_L2ImageFilenamesProvider,
            l_ReflectanceQuantificationValue,
            l_AOTQuantificationValue,
            self._aotnodatavalue,
            l_VAPQuantificationValue,
            self._vapnodatavalue,
            self._plugin.CLDDataBandsSelected,
            self._plugin.CLDCoreAlgorithmsMapBand,
            self._writepublicproduct,
            self._envcoroption,
            working_dir)
        # Write Quicklook QLK
        if self._writepublicproduct:
            self.write_quicklook_image_from_files(
                l_L2ImageFilenamesProvider.get_qlk_filename(),
                self._writepublicproduct,
                self._qckl_red_image,
                self._qckl_green_image,
                self._qckl_blue_image,
                self._quicklookminreflredband,
                self._quicklookmaxreflredband,
                self._quicklookminreflgreenband,
                self._quicklookmaxreflgreenband,
                self._quicklookminreflblueband,
                self._quicklookmaxreflblueband,
                self._real_l2_no_data,
                self._dtm.ALC,
                working_dir)
def gen_synthetised_luts(self, p_workdir):
    """Build synthesized LUT GIPPs (TOCR, DIFT, DIRT, ALBD) as a weighted mix of per-model LUTs.

    For every model listed in ``self._proportions``, each raw LUT subfile is
    multiplied by the model's weight and accumulated (via ``multiply_and_add``)
    into a new raw file under ``p_workdir``; one new Tmp_*.HDR xml is then
    exported per LUT type and the resulting filenames are stored on the
    instance (``self._new_gipp_filename_tocr`` etc.).

    :param p_workdir: directory receiving the Tmp_*.HDR files and .DBL.DIR directories
    :raises MajaDataException: if proportions are missing, a model has no LUT
        entry, or the models do not all share the same number of subfiles
    """
    # Generate luts based on proportions of each model
    if self._proportions is None:
        raise MajaDataException(
            "No proportions given in athmospheric lut synthetize")
    l_nbfiles = 0
    # One entry per LUT subfile index; each entry is a list of
    # (weight, absolute source path) tuples, one tuple per model.
    l_listoflistoffiles_tocr = []
    l_listoflistoffiles_dirt = []
    l_listoflistoffiles_dift = []
    l_listoflistoffiles_albd = []
    for m in list(self._proportions.keys()):
        LOGGER.debug("Treating model : " + m)
        if m not in self.GIP_L2TOCR_LookUpTableConverter or m not in self.GIP_L2DIFT_LookUpTableConverter \
                or m not in self.GIP_L2DIRT_LookUpTableConverter or m not in self.GIP_L2ALBD_LookUpTableConverter:
            raise MajaDataException("No " + m + " in luts while asked in proportions")
        else:
            if l_nbfiles == 0:
                # First model seen: fix the expected subfile count and seed the lists
                l_nbfiles = len(self.GIP_L2TOCR_LookUpTableConverter[m].new_list_of_files)
                for f in range(l_nbfiles):
                    l_listoflistoffiles_tocr.append([
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2TOCR_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2TOCR_LookUpTableConverter[m].new_list_of_files[f]))
                    ])
                    l_listoflistoffiles_dift.append([
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2DIFT_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2DIFT_LookUpTableConverter[m].new_list_of_files[f]))
                    ])
                    l_listoflistoffiles_dirt.append([
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2DIRT_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2DIRT_LookUpTableConverter[m].new_list_of_files[f]))
                    ])
                    l_listoflistoffiles_albd.append([
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2ALBD_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2ALBD_LookUpTableConverter[m].new_list_of_files[f]))
                    ])
            elif l_nbfiles != len(self.GIP_L2TOCR_LookUpTableConverter[m].new_list_of_files):
                raise MajaDataException(
                    "Not the same number of subfiles in luts for model : " + m)
            else:
                # Subsequent models: append their weighted file to each subfile entry
                for f in range(l_nbfiles):
                    l_listoflistoffiles_tocr[f].append(
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2TOCR_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2TOCR_LookUpTableConverter[m].new_list_of_files[f])))
                    l_listoflistoffiles_dift[f].append(
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2DIFT_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2DIFT_LookUpTableConverter[m].new_list_of_files[f])))
                    l_listoflistoffiles_dirt[f].append(
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2DIRT_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2DIRT_LookUpTableConverter[m].new_list_of_files[f])))
                    l_listoflistoffiles_albd[f].append(
                        (self._proportions[m],
                         os.path.join(
                             os.path.dirname(
                                 self.GIP_L2ALBD_LookUpTableConverter[m].new_gipp_filename),
                             self.GIP_L2ALBD_LookUpTableConverter[m].new_list_of_files[f])))
    # build files: one Tmp_<TYPE>.HDR plus its .DBL.DIR per LUT type
    new_working = p_workdir
    # TOCR
    new_gipp_tocr_filename = os.path.join(new_working, "Tmp_TOCR.HDR")
    new_dbldir_tocr_rel = os.path.splitext(
        os.path.basename(new_gipp_tocr_filename))[0] + ".DBL.DIR"
    new_dbldir_tocr = os.path.join(new_working, new_dbldir_tocr_rel)
    file_utils.create_directory(new_dbldir_tocr)
    new_dbl_tocr = List_Of_FilesType()
    # DIFT
    new_gipp_dift_filename = os.path.join(new_working, "Tmp_DIFT.HDR")
    new_dbldir_dift_rel = os.path.splitext(
        os.path.basename(new_gipp_dift_filename))[0] + ".DBL.DIR"
    new_dbldir_dift = os.path.join(new_working, new_dbldir_dift_rel)
    file_utils.create_directory(new_dbldir_dift)
    new_dbl_dift = List_Of_FilesType()
    # DIRT
    new_gipp_dirt_filename = os.path.join(new_working, "Tmp_DIRT.HDR")
    new_dbldir_dirt_rel = os.path.splitext(
        os.path.basename(new_gipp_dirt_filename))[0] + ".DBL.DIR"
    new_dbldir_dirt = os.path.join(new_working, new_dbldir_dirt_rel)
    file_utils.create_directory(new_dbldir_dirt)
    new_dbl_dirt = List_Of_FilesType()
    # ALBD
    new_gipp_albd_filename = os.path.join(new_working, "Tmp_ALBD.HDR")
    new_dbldir_albd_rel = os.path.splitext(
        os.path.basename(new_gipp_albd_filename))[0] + ".DBL.DIR"
    new_dbldir_albd = os.path.join(new_working, new_dbldir_albd_rel)
    file_utils.create_directory(new_dbldir_albd)
    new_dbl_albd = List_Of_FilesType()
    # Multiply and write bin files (weighted accumulation per subfile)
    # TOCR
    for m in l_listoflistoffiles_tocr:
        l_newraw_tocr = os.path.join(new_dbldir_tocr, os.path.basename(m[0][1]))
        l_newraw_tocr_rel = os.path.join(new_dbldir_tocr_rel, os.path.basename(m[0][1]))
        for f in m:
            LOGGER.debug("Multiply " + f[1] + " by " + str(f[0]))
            multiply_and_add(f[1], l_newraw_tocr, f[0])
        new_dbl_tocr.add_Relative_File_Path(l_newraw_tocr_rel)
    # DIFT
    for m in l_listoflistoffiles_dift:
        l_newraw_dift = os.path.join(new_dbldir_dift, os.path.basename(m[0][1]))
        l_newraw_dift_rel = os.path.join(new_dbldir_dift_rel, os.path.basename(m[0][1]))
        for f in m:
            LOGGER.debug("Multiply " + f[1] + " by " + str(f[0]))
            multiply_and_add(f[1], l_newraw_dift, f[0])
        new_dbl_dift.add_Relative_File_Path(l_newraw_dift_rel)
    # DIRT
    for m in l_listoflistoffiles_dirt:
        l_newraw_dirt = os.path.join(new_dbldir_dirt, os.path.basename(m[0][1]))
        l_newraw_dirt_rel = os.path.join(new_dbldir_dirt_rel, os.path.basename(m[0][1]))
        for f in m:
            LOGGER.debug("Multiply " + f[1] + " by " + str(f[0]))
            multiply_and_add(f[1], l_newraw_dirt, f[0])
        new_dbl_dirt.add_Relative_File_Path(l_newraw_dirt_rel)
    # ALBD
    for m in l_listoflistoffiles_albd:
        l_newraw_albd = os.path.join(new_dbldir_albd, os.path.basename(m[0][1]))
        l_newraw_albd_rel = os.path.join(new_dbldir_albd_rel, os.path.basename(m[0][1]))
        for f in m:
            LOGGER.debug("Multiply " + f[1] + " by " + str(f[0]))
            multiply_and_add(f[1], l_newraw_albd, f[0])
        new_dbl_albd.add_Relative_File_Path(l_newraw_albd_rel)
    # TOCR
    # build root node (indexes are taken from the first model's converter)
    new_lut_tocr = LUT(
        self.GIP_L2TOCR_LookUpTableConverter[list(
            self._proportions.keys())[0]].new_lut_indexes, new_dbl_tocr)
    # build file
    output = io.StringIO()
    output.write('<?xml version="1.0" ?>\n')
    new_lut_tocr.export(output, 0, name_='LUT', namespacedef_='', pretty_print=True)
    with open(new_gipp_tocr_filename, "w") as fh:
        # NOTE(review): replace(" ", " ") is a no-op as written — the original
        # replacement arguments look whitespace-mangled (double space?); confirm.
        fh.write(output.getvalue().replace(" ", " "))
    LOGGER.info("Writed new gipp lut to " + new_gipp_tocr_filename)
    # DIFT
    # build root node
    new_lut_dift = LUT(
        self.GIP_L2DIFT_LookUpTableConverter[list(
            self._proportions.keys())[0]].new_lut_indexes, new_dbl_dift)
    # build file
    output = io.StringIO()
    output.write('<?xml version="1.0" ?>\n')
    new_lut_dift.export(output, 0, name_='LUT', namespacedef_='', pretty_print=True)
    with open(new_gipp_dift_filename, "w") as fh:
        fh.write(output.getvalue().replace(" ", " "))
    LOGGER.info("Writed new gipp lut to " + new_gipp_dift_filename)
    # DIRT
    # build root node
    new_lut_dirt = LUT(
        self.GIP_L2DIRT_LookUpTableConverter[list(
            self._proportions.keys())[0]].new_lut_indexes, new_dbl_dirt)
    # build file
    output = io.StringIO()
    output.write('<?xml version="1.0" ?>\n')
    new_lut_dirt.export(output, 0, name_='LUT', namespacedef_='', pretty_print=True)
    with open(new_gipp_dirt_filename, "w") as fh:
        fh.write(output.getvalue().replace(" ", " "))
    LOGGER.info("Writed new gipp lut to " + new_gipp_dirt_filename)
    # ALBD
    # build root node
    new_lut_albd = LUT(
        self.GIP_L2ALBD_LookUpTableConverter[list(
            self._proportions.keys())[0]].new_lut_indexes, new_dbl_albd)
    # build file
    output = io.StringIO()
    output.write('<?xml version="1.0" ?>\n')
    new_lut_albd.export(output, 0, name_='LUT', namespacedef_='', pretty_print=True)
    with open(new_gipp_albd_filename, "w") as fh:
        fh.write(output.getvalue().replace(" ", " "))
    LOGGER.info("Writed new gipp lut to " + new_gipp_albd_filename)
    # Write in instance
    self._synthetized = True
    self._new_gipp_filename_tocr = new_gipp_tocr_filename
    self._new_gipp_filename_dift = new_gipp_dift_filename
    self._new_gipp_filename_dirt = new_gipp_dirt_filename
    self._new_gipp_filename_albd = new_gipp_albd_filename
def __init__(self, working_dir, gipp_filename, validate_schema=False, schema_path=None, use_rh=False, rh=None):
    """Load a LUT GIPP, copy its packaged DBL subfiles into working_dir and re-export the xml.

    :param working_dir: destination directory for the copied .DBL.DIR content and new HDR
    :param gipp_filename: path of the source LUT GIPP (HDR xml)
    :param validate_schema: if True, validate the xml against its schema
    :param schema_path: schema location used when validate_schema is True
    :param use_rh: if True, keep only the subfiles matching the given relative humidity
    :param rh: relative humidity value used to filter subfiles (must not be None when use_rh is True)
    :raises MajaDataException: if an unhandled LUT index type is encountered
    """
    self.lut_handler = GippLUTEarthExplorerXMLFileHandler(
        gipp_filename, validate=validate_schema, schema_path=schema_path)
    self.original_gipp_filename = gipp_filename
    self.lut_indexes = self.lut_handler.get_lut_indexes()
    self.list_of_files = self.lut_handler.get_list_of_packaged_dbl_files(True, True)
    self.new_list_of_files = []
    self.new_gipp_filename = os.path.join(working_dir, os.path.basename(gipp_filename))
    new_dbldir_rel = os.path.splitext(
        os.path.basename(gipp_filename))[0] + ".DBL.DIR"
    self.new_dbldir = os.path.join(working_dir, new_dbldir_rel)
    file_utils.create_directory(self.new_dbldir)
    for f in self.list_of_files:
        # Copy every subfile, or only the one whose name contains "RH<rh>" when use_rh is set
        if (use_rh and "RH" + str(int(rh)) in f) or not use_rh:
            LOGGER.debug("Copying " + f + " to " + self.new_dbldir)
            file_utils.copy_file_to_directory(f, self.new_dbldir)
            self.new_list_of_files.append(new_dbldir_rel + os.path.sep + os.path.basename(f))
            LOGGER.debug(self.new_list_of_files[-1])
    # Build indexes: translate each known index type onto the new IndexesType
    self.new_lut_indexes = IndexesType()
    for ind, vals in list(self.lut_indexes.items()):
        if ind == "Solar_Zenith_Angle_Indexes":
            self.new_lut_indexes.set_Solar_Zenith_Angle_Indexes(vals)
        elif ind == "Relative_Azimuth_Angle_Indexes":
            self.new_lut_indexes.set_Relative_Azimuth_Angle_Indexes(vals)
        elif ind == "Altitude_Indexes":
            self.new_lut_indexes.set_Altitude_Indexes(vals)
        elif ind == "AOT_Indexes":
            self.new_lut_indexes.set_AOT_Indexes(vals)
        elif ind == "TOA_Reflectance_Indexes":
            self.new_lut_indexes.set_TOA_Reflectance_Indexes(vals)
        elif ind == "Zenithal_Angle_Indexes":
            self.new_lut_indexes.set_Zenithal_Angle_Indexes(vals)
        elif ind == "View_Zenith_Angle_Indexes":
            self.new_lut_indexes.set_View_Zenith_Angle_Indexes(vals)
        elif ind == "Reflectance_Ratio_Indexes":
            self.new_lut_indexes.set_Reflectance_Ratio_Indexes(vals)
        else:
            raise MajaDataException("Unhandled lut index type : " + ind)
    # build files
    new_dbl = List_Of_FilesType()
    for f in self.new_list_of_files:
        new_dbl.add_Relative_File_Path(f)
    # build root node
    new_lut = LUT(self.new_lut_indexes, new_dbl)
    # build file
    output = io.StringIO()
    output.write('<?xml version="1.0" ?>\n')
    new_lut.export(output, 0, name_='LUT', namespacedef_='', pretty_print=True)
    with open(self.new_gipp_filename, "w") as fh:
        # NOTE(review): replace(" ", " ") is a no-op as written — the original
        # replacement arguments look whitespace-mangled (double space?); confirm.
        fh.write(output.getvalue().replace(" ", " "))
    LOGGER.info("Writed new gipp lut to " + self.new_gipp_filename)
def initialize(self):
    """Parse the maja command line, configure logging and load user/admin configuration.

    Populates (among others) ``self._outputDirectory``, ``self._inputDirectory``,
    ``self._workingDirectory``, ``self._userConf``, ``self._adminConf``,
    ``self._nbThreads`` and ``self._processorName``.

    :raises MajaDataException: on missing mandatory options or configuration files
    :raises MajaProcessingException: if the MAJA_INSTALL_DIR environment variable is not set
    """
    maja_description = """ ./maja [options] \n\n
MAJA Chains \n\n
CNES All rights reserved. For more details, see Copyright.txt file.\n\n
Description: \n
------------\n\n
The L2 processor offers advanced atmospheric correction algorithms including\n
water vapour and aerosol estimates based on multitemporal data analysis.\n
It also provides cloud mask generation.\n
- Cloud masking\n
 * Cloud detection\n
 * Shadow detection\n
- Atmospheric correction\n
 * Gaseous absorption correction\n
 * Scattering correction\n
- Environment and slope correction\n
 * Environment effects\n
 * Slope correction\n
- Composite image update\n
The data and GIPPs files mandatory for MAJA are:\n
- For L2 processing:\n
 * GIP_L2COMM\n
 * GIP_L2DIRT\n
 * GIP_L2DIFT\n
 * GIP_L2SMAC\n
 * GIP_L2WATV\n
 * GIP_L2TOCR\n
 * GIP_L2ALBD\n
 * GIP_L2SITE\n
 * EXO_METDTA\n
 * AUX_REFDE2\n
Processing description:\n
-----------------------\n
The maja launches the following processes:\n
- launches the pre-processing treatment\n
 * Uncompresses all data (DBL package files and BZ2 images files)\n
 * Check input data with the schemas\n
 * Deletes all tarballs (if option is enable in the Configuration file)\n
 * Applies a specific stylesheet on GIPPs files\n
- launches the scientific-processing treatment\n
 * Reads image products\n
 * Applies algorithms\n
 * Formats EE and writes datas\n
- launches the post-processing treatment\n
 * Check output data with the schemas\n
 * Compress BZ2 all .TIF images data files\n
 * Generates the .DBL image product data (L2/L3)\n\n
For more details, report to the SUM (Software User Manual, ref. LAIG-MU-MAC-010-CS)\n\n
Author: CS Systemes d'Information (France)\n\n
User cases:\n
-----------\n
1. First user case: Use only a JobOrder file to launch maja processing. \n
   -> use the '--jobOrder' option.\n
   Note: the possible values for the <Processor_Name> field are:\n
 * m_MAJA_L2_INIT_CHAIN \n
 * m_MAJA_L2_NOMINAL_CHAIN \n
 * m_MAJA_L2_BACKWARD_CHAIN \n
2. Second user case: Use command line parameters to launch maja processing\n
   -> for example, use at least the '--mode' and '--input' options\n\n
"""
    parser = argparse.ArgumentParser(
        description=maja_description,  # main description for help
        epilog='Beta',  # displayed after help
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-j", "--jobOrder", help="Specify the JobOrder file (xml file)")
    parser.add_argument(
        "-l", "--loglevel",
        help="Log level use and set to the JobOrder generated.",
        choices=['INFO', 'PROGRESS', 'WARNING', 'DEBUG', 'ERROR'],
        default='INFO')
    parser.add_argument(
        "-m", "--mode",
        help="Processing mode.",
        choices=['L2INIT', 'L2NOMINAL', 'L2BACKWARD'],
        default='L2INIT',
        required=False)
    parser.add_argument(
        "-t", "--enableTest",
        help="Enable/Disable the field value 'Test' set in the JobOrder generated.",
        action="store_true")
    parser.add_argument(
        "--stylesheet",
        help="XML Stylesheet filename, used to overloads parameters in the XML configuration files and GIPP files. "
        "See the [MU] for an example of StyleSheet.")
    parser.add_argument(
        "-acs", "--adminconf",
        help="Administration Configuration directory (contains for example the MAJAAdminConfigSystem.xml)")
    inputDescr = """Input data directory: must be contain images, all GIPPs files, the DTM, etc.).
The directory must be contain only one L1 product for the 'L2INIT' mode, a list of L1 products
for the 'L2BACKWARD' mode, one L1 product and one L2 product for the 'L2NOMINAL' mode and a list of L2 products
"""
    parser.add_argument("-i", "--input", help=inputDescr, required=False)
    parser.add_argument(
        "-o", "--output",
        help="Output data directory (product directory). Default value: '.'",
        required=False)
    parser.add_argument(
        "-w", "--workingdir",
        help="Working directory (working 'temporary' directory). Default value: the '--output parameter value'",
        required=False)
    plugin_choices = "Available plugins : "
    creators = FactoryBase.get_registered("PluginBase")
    for crea in creators:
        plugin_choices = plugin_choices + crea().PluginName + " , "
    parser.add_argument(
        "-p", "--plugin",
        help="Output plugin to use to write L2 product. " + plugin_choices)
    parser.add_argument(
        "-ucs", "--conf",
        help="User Configuration directory (contains for example MAJAUserConfigSystem.xml)")
    parser.add_argument(
        "--NbThreads",
        type=int,
        help="UserConfigSystem overloads value for the parameter 'NbThreads'")
    parser.add_argument(
        "--CheckXMLFilesWithSchema",
        help="UserConfigSystem overloads value for the parameter 'CheckXMLFilesWithSchema'",
        action="store_true")
    parser.add_argument(
        "--CleanInputZipFiles",
        help="UserConfigSystem overloads value for the parameter 'CleanInputZipFiles'",
        action="store_true")
    parser.add_argument(
        "--CleanFiles",
        help="UserConfigSystem overloads value for the parameter 'CleanFiles'",
        action="store_true")
    parser.add_argument(
        "--ZipFiles",
        help="UserConfigSystem overloads value for the parameter 'ZipFiles'",
        action="store_true")
    parser.add_argument(
        "--EnableCleaningCachingDirectoryBeforeProcessing",
        help="UserConfigSystem overloads value for the parameter 'EnableCleaningCachingDirectoryBeforeProcessing'",
        action="store_true")
    parser.add_argument(
        "--EnableCleaningCachingDirectoryAfterProcessing",
        help="UserConfigSystem overloads value for the parameter 'EnableCleaningCachingDirectoryAfterProcessing'",
        action="store_true")
    parser.add_argument(
        "--EnableCleaningTemporaryDirectory",
        help="UserConfigSystem overloads value for the parameter 'EnableCleaningTemporaryDirectory'",
        action="store_true")
    parser.add_argument(
        "--TileId",
        help="Set the Tile id of the Sentinel2 L1 product (Only necessary for SENTINEL2 plug-in and only for "
        "the L1C product with PSD version < PSD 14)",
        required=False)
    parser.add_argument(
        "--perfos-log",
        help="Enable performance measurement in log",
        action="store_true")
    parser.add_argument(
        "--perfos-report",
        help="Enable synthesize performance measurement in log and xml file",
        action="store_true")
    parser.add_argument(
        "-v", "--version",
        help="Display version information and exit",
        action="store_true"
    )
    args = parser.parse_args()
    if args.version :
        LOGGER.info("Maja Software Version: "+version.MAJA_VERSION)
        exit(0)
    # Set the log level
    self._logLevel = args.loglevel
    os.environ["MAJA_LOGGER_LEVEL"] = args.loglevel
    if args.loglevel == "INFO":
        LOGGER.setLevel(logging.INFO)
    elif args.loglevel == "PROGRESS":
        # NOTE(review): PROGRESS is not a stdlib logging level — assumed to be
        # a MAJA-specific level registered elsewhere; confirm.
        LOGGER.setLevel(logging.PROGRESS)
    elif args.loglevel == "WARNING":
        LOGGER.setLevel(logging.WARNING)
    elif args.loglevel == "DEBUG":
        LOGGER.setLevel(logging.DEBUG)
        # DEBUG also propagates to the OTB applications
        os.environ["OTB_LOGGER_LEVEL"] = "DEBUG"
    elif args.loglevel == "ERROR":
        LOGGER.setLevel(logging.ERROR)
    LOGGER.info("Logger in %s mode ( %s )", args.loglevel, LOGGER.getEffectiveLevel())
    # Output directory (mandatory, created if missing)
    if args.output is not None:
        self._outputDirectory = args.output + os.path.sep
    else:
        raise MajaDataException("Output option -o/--output must be specified")
    LOGGER.info("Output Directory : %s ", self._outputDirectory)
    if not os.path.exists(self._outputDirectory):
        LOGGER.debug("OUTPUT_DIR must be created in %s", self._outputDirectory)
        file_utils.create_directory(self._outputDirectory)
    # Input directory (mandatory)
    if args.input is not None:
        self._inputDirectory = args.input + os.path.sep
    else:
        raise MajaDataException("Input option -i/--input must be specified")
    LOGGER.info("Input Directory : %s ", self._inputDirectory)
    # Working directory (defaults to a hidden subdir of the output directory)
    if args.workingdir is not None:
        self._workingDirectory = args.workingdir
    else:
        self._workingDirectory = os.path.join(self._outputDirectory, ".maja-working-directory")
    LOGGER.info("Working Directory : %s ", self._workingDirectory)
    self._directory_manager = DirectoryManager(self._workingDirectory)
    if not os.path.exists(self._workingDirectory):
        LOGGER.debug("WORKING_DIR must be created in %s", self._workingDirectory)
        file_utils.create_directory(self._workingDirectory)
    if args.plugin is not None:
        self._output_Plugin = args.plugin
    if args.TileId is not None:
        self._tile_id = args.TileId
    if os.getenv("MAJA_INSTALL_DIR") is None :
        raise MajaProcessingException("MAJA_INSTALL_DIR is not Set")
    if args.stylesheet is not None:
        self._stylesheet = args.stylesheet
    # User config
    if args.conf is not None:
        self._userconf_directory = args.conf
    self.luserConfigSystemFileName = os.path.join(self._userconf_directory, "MAJAUserConfigSystem.xml")
    if not os.path.exists(self.luserConfigSystemFileName):
        raise MajaDataException(
            "The User configuration system file <" + self.luserConfigSystemFileName + "> doesn't exist!")
    # Load the file
    LOGGER.info("Using "+self.luserConfigSystemFileName+" as userConfig file")
    self._userConfigSystemFileName = file_utils.copy_file_to_directory(self.luserConfigSystemFileName,
                                                                       self._workingDirectory, notestmode=True)
    if self._stylesheet is not None:
        if not os.path.exists(self._stylesheet):
            raise MajaDataException(str(self._stylesheet) + " doesn't exist !!!!")
        translate_xsl(self._userConfigSystemFileName, self._stylesheet)
    self._userConf = user_conf.parse(self._userConfigSystemFileName, True)
    # overload values from the command line, falling back to the config file
    if args.CheckXMLFilesWithSchema:
        self._validate_schemas = True
    else:
        self._validate_schemas = self._userConf.get_Processing().get_CheckXMLFilesWithSchema()
    if args.CleanFiles:
        self._userConf.get_Processing().get_PostProcessing().set_CleanFiles(True)
    if args.ZipFiles:
        self._userConf.get_Processing().get_PostProcessing().set_ZipFiles(True)
    if args.CleanInputZipFiles:
        self._userConf.get_Processing().get_PreProcessing().set_CleanInputZipFiles(True)
    if args.EnableCleaningTemporaryDirectory:
        self._userConf.get_Computing().set_EnableCleaningTemporaryDirectory(True)
    if args.NbThreads is not None:
        self._nbThreads = args.NbThreads
    else:
        self._nbThreads = self._userConf.get_Computing().get_NbThreads()
    # Admin config
    if args.adminconf is not None:
        self._adminconf_directory = args.adminconf
    luseradminsystemfilename = os.path.join(self._adminconf_directory, "MAJAAdminConfigSystem.xml")
    if not os.path.exists(luseradminsystemfilename):
        raise MajaDataException("The Admin configuration system file <" +
                                luseradminsystemfilename + "> doesn't exist!")
    self._adminConfigSystemFileName = file_utils.copy_file_to_directory(
        luseradminsystemfilename, self._workingDirectory, notestmode=True)
    if self._stylesheet is not None:
        translate_xsl(self._adminConfigSystemFileName, self._stylesheet)
    # Load the file
    self._adminConf = admin_conf.parse(self._adminConfigSystemFileName, True)
    # Processing mode -> processor name (note: --mode has a default, so the
    # else branch below is unreachable in practice)
    if args.mode is not None:
        lProcessorName = args.mode
    else:
        raise MajaDataException("Mode must be specified : -m/--mode")
    self._processorName = AppHandler.MAJA_L2_NOMINAL_CHAIN
    if lProcessorName == AppHandler.L2INIT:
        self._processorName = AppHandler.MAJA_L2_INIT_CHAIN
    elif lProcessorName == AppHandler.L2NOMINAL:
        self._processorName = AppHandler.MAJA_L2_NOMINAL_CHAIN
    elif lProcessorName == AppHandler.L2BACKWARD:
        self._processorName = AppHandler.MAJA_L2_BACKWARD_CHAIN
    else:
        raise MajaDataException("Unknown mode in parameters")
    LOGGER.info("Processor is %s", self._processorName)
    LOGGER.progress("Starting " + self._processorName)
    LOGGER.info("Number of theads %i", self._nbThreads)
def write(self, working_dir):
    """Write the MUSCATE L2 product corresponding to the current L1 input.

    Builds the public product tree (<output>/<L2BaseFilename>/DATA),
    initializes the private image filename provider, records the global
    header and production-report filenames, and, when
    ``self._write_l2_products`` is set, writes the private images, the
    public images and (optionally) the quicklook JPEG.

    :param working_dir: directory used for temporary/intermediate files
    """
    # NOTE(review): this method was reconstructed from a whitespace-mangled
    # source; statement nesting should be verified against upstream MAJA.
    LOGGER.debug("Start MuscateL2ImageFileWriter::Write() ...")
    l_L1XMLFilename = self._l1_image_info.HeaderFilename
    LOGGER.debug(
        "Start writing the L2 product for this L1 header filename <" +
        l_L1XMLFilename + "> in the output directory <" +
        self._l2_output_directory + ">.")
    # L2BaseFilename example: LANDSAT5-TM-XS_20100118-103000-000_L2A_EU93066200A00B_C_V1-0
    l_L2BaseFilename = self._l1_image_info.get_l2_product_id()
    LOGGER.debug("L2Base filename: " + l_L2BaseFilename)
    # Public product tree: <output>/<L2BaseFilename>/DATA
    l_PublicDirectory = os.path.join(self._l2_output_directory, l_L2BaseFilename)
    l_DATADirectory = os.path.join(l_PublicDirectory, "DATA")
    l_DateInYYYYMMDD = self._l1_image_info.ProductDateStr
    # Private image filenames live under DATA/<L2BaseFilename>_PVD_ALL
    l_L2PrivateImageFilenames = L2PrivateImageFilenamesProvider()
    l_L2PrivateImageFilenames.set_dealing_ltc(self._dealingltc)
    l_L2PrivateImageFilenames.initialize_with_root_dir(
        os.path.join(l_DATADirectory, l_L2BaseFilename + "_PVD_ALL"),
        l_DateInYYYYMMDD)
    # Call super class GeneateData()
    self.initialize(l_L2PrivateImageFilenames)
    # Create the Outputs directories
    file_utils.create_directory(l_PublicDirectory)
    file_utils.create_directory(l_DATADirectory)
    file_utils.create_directory(
        l_L2PrivateImageFilenames.get_private_directory())
    # If multi temporal, an LTC image directory is also needed
    LOGGER.debug("MuscateL2ImageFileWriter:: m_WriteLTC : " + str(self._dealingltc))
    if self._dealingltc:
        file_utils.create_directory(
            l_L2PrivateImageFilenames.get_ltc_image_dir_filename())
    # Store the global (MTD_ALL) header filename
    l_L2XMLFilename = os.path.join(l_PublicDirectory, l_L2BaseFilename + "_MTD_ALL.xml")
    self._global_product_filename = l_L2XMLFilename
    # Set the product report filename for MUSCATE handling !!!
    self._production_report_filename = self._l2_output_directory + "/maja_product_report.log"
    # Store the Privates filename
    self._l2privateimagefilenamesprovider = l_L2PrivateImageFilenames
    # Write product only if enable
    if self._write_l2_products:
        # Reciprocals of the configured quantification steps, used as the
        # scaling coefficients applied to the written images.
        # ex : - self.m_ReflectanceQuantificationValue : 0.001
        #      - self.m_AOTQuantificationValue : 0.05
        #      - self.m_VAPQuantificationValue : 0.005
        l_ReflectanceQuantificationValue = 1. / self._reflectancequantification
        l_AOTQuantificationValue = 1. / self._aotquantificationvalue
        l_VAPQuantificationValue = 1. / self._vapquantificationvalue
        # Write Private images
        self.write_private_images(
            l_L2PrivateImageFilenames, l_ReflectanceQuantificationValue,
            self._plugin.CLDDataBandsSelected,
            self._plugin.CLDCoreAlgorithmsMapBand, self._writeonlyclacld,
            self._dealingltc, working_dir)
        # Write Public images (include CLD public)
        self.write_public_images(
            l_PublicDirectory, l_L2BaseFilename,
            l_ReflectanceQuantificationValue, l_AOTQuantificationValue,
            self._aotnodatavalue, l_VAPQuantificationValue,
            self._vapnodatavalue, self._plugin.CLDDataBandsSelected,
            self._plugin.CLDCoreAlgorithmsMapBand, self._writepublicproduct,
            self._envcoroption, working_dir)
        l_QuicklookFilename = l_L2XMLFilename.replace(
            "_MTD_ALL.xml", "_QKL_ALL.jpg")
        LOGGER.debug("The Quicklook filename is <" + l_QuicklookFilename + ">.")
        # Write Quicklook QLK (RGB composite, first L2 resolution: resol_QLK = 0)
        if self._writepublicproduct:
            resol_QLK = 0
            l_BandsDefinitions = self._plugin.BandsDefinitions
            l_RedBandId, l_BlueBandId, l_GreenBandId = l_BandsDefinitions.get_l2_information_for_quicklook_band_code(
                self._quicklookredbandcode, self._quicklookgreenbandcode,
                self._quicklookbluebandcode)
            l_L2QuickLookFilename = l_PublicDirectory + "/" + l_L2BaseFilename + "_QKL_ALL.jpg"
            self.write_quicklook_image_from_files(
                l_L2QuickLookFilename, self._writepublicproduct,
                self._qckl_red_image, self._qckl_green_image,
                self._qckl_blue_image, self._quicklookminreflredband,
                self._quicklookmaxreflredband, self._quicklookminreflgreenband,
                self._quicklookmaxreflgreenband, self._quicklookminreflblueband,
                self._quicklookmaxreflblueband, self._real_l2_no_data,
                self._sre_list[resol_QLK], working_dir)
def write_public_images(self, p_PublicDirectory, p_L2BaseFilename,
                        p_ReflectanceQuantificationValue,
                        p_AOTQuantificationValue, p_AOTNodataValue,
                        p_VAPQuantificationValue, p_VAPNodataValue,
                        p_CLDDataBandsSelected, p_CLDCoreAlgorithmsMapBand,
                        p_WritePublicProduct, p_EnvCorOption, working_dir):
    """Write the public part of the MUSCATE L2 product.

    For each L2 resolution, writes under *p_PublicDirectory*:
    the SRE (and, with envelope correction, FRE) reflectance files, the
    ATB file (scaled water vapour + AOT), and the MASKS files
    (IAB, EDG, MG2, SAT, PIX, USI, DFP, CLM).  Also records the band
    files used later for the quicklook (self._qckl_*_image).

    :param p_PublicDirectory: root of the public product tree
    :param p_L2BaseFilename: L2 product base name used for every file
    :param p_ReflectanceQuantificationValue: coefficient applied to SRE/FRE
    :param p_AOTQuantificationValue: coefficient applied to AOT values
    :param p_AOTNodataValue: value written where the edge mask is set (AOT)
    :param p_VAPQuantificationValue: coefficient applied to water vapour
    :param p_VAPNodataValue: value written where the edge mask is set (VAP)
    :param p_CLDDataBandsSelected: cloud-mask bands to write in CLM
    :param p_CLDCoreAlgorithmsMapBand: map of cloud-mask band indices
    :param p_WritePublicProduct: when False, nothing is written
    :param p_EnvCorOption: environment correction enabled (adds FRE/TGS/STL)
    :param working_dir: directory for temporary files
    """
    # NOTE(review): reconstructed from a whitespace-mangled source; verify
    # loop/branch nesting against upstream MAJA before relying on it.
    # IF PUBLIC PART OF L2 PRODUCT IS WRITTEN
    if p_WritePublicProduct:
        l_BandsDefinitions = self._plugin.BandsDefinitions
        l_ListOfL2Res = l_BandsDefinitions.ListOfL2Resolution
        l_NumberOfResolutions = len(l_ListOfL2Res)
        LOGGER.debug(
            "L2ImageFileWriterBase::Initialize Number of resolutions: " +
            str(l_NumberOfResolutions) + ".")
        # Not use the list of XS band because for Landsat8, the band B9 is not
        # selected in the L2 resolution (only in L2Coarse resolution)
        l_BaseL2FullFilename = os.path.join(p_PublicDirectory, p_L2BaseFilename)
        l_MASKSDirectory = os.path.join(p_PublicDirectory, "MASKS")
        l_BaseL2FullMASKSFilename = os.path.join(l_MASKSDirectory, p_L2BaseFilename)
        file_utils.create_directory(l_MASKSDirectory)
        # Quicklook bands are extracted at the first resolution only
        resol_QLK = 0
        l_RedBandId, l_BlueBandId, l_GreenBandId = l_BandsDefinitions.get_l2_information_for_quicklook_band_code(
            self._quicklookredbandcode, self._quicklookgreenbandcode,
            self._quicklookbluebandcode)
        # *********************************************************************
        # **** LOOP on RESOLUTION *********************************************
        # *********************************************************************
        for resol in range(0, l_NumberOfResolutions):
            l_StrResolution = l_BandsDefinitions.ListOfL2Resolution[resol]
            # L2 area AreaType l_AreaToL2Resolution
            # NOTE(review): l_AreaFile appears unused below — confirm.
            l_AreaFile = self._sre_list[resol]
            l_ListOfBand = l_BandsDefinitions.get_list_of_l2_band_code(
                l_StrResolution)
            l_NumberOfBands = len(l_ListOfBand)
            LOGGER.debug(
                "L2ImageFileReader::Gen Public image file for the resolution " +
                l_StrResolution + ".")
            # Group suffix: "XS" for mono-resolution products, else the
            # resolution name (e.g. R1/R2)
            l_grpSuffix = ""
            if l_NumberOfResolutions == 1:
                l_grpSuffix = "XS"
            else:
                l_grpSuffix = l_ListOfL2Res[resol]
            l_StrResolution = l_BandsDefinitions.ListOfL2Resolution[resol]
            # Read the Coef apply for SRE and FRE images
            LOGGER.info(
                "SRE and FRE values multiply by the reflectance quantification value " +
                str(p_ReflectanceQuantificationValue) + ".")
            # *****************************************************************
            # **** PUBLIC DATA ************************************************
            # *****************************************************************
            # START WRITING SRE Image file DATA
            # Initialize the scalar (quantification) filter
            sre_pipeline = OtbPipelineManager()
            # Extract each channel for each file
            tmp_l2_filename_list = []
            tmp_l2_image_list = []
            tmp_sre_scaled = os.path.join(
                working_dir, "tmp_sre_multi_round_" + l_StrResolution + ".tif")
            param_scaled_sre = {
                "im": self._sre_list[resol],
                "coef": p_ReflectanceQuantificationValue,
                "out": tmp_sre_scaled
            }
            scaled_sre_app = OtbAppHandler("RoundImage", param_scaled_sre,
                                           write_output=False)
            sre_pipeline.add_otb_app(scaled_sre_app)
            for i in range(l_NumberOfBands):
                # Keep the unscaled quicklook bands aside (R/G/B only, at
                # the quicklook resolution)
                if resol == resol_QLK and (l_RedBandId == i or l_GreenBandId == i
                                           or l_BlueBandId == i):
                    tmp_sre_roi = os.path.join(
                        working_dir, "tmp_sre_roi_" + l_ListOfBand[i] + ".tif")
                    tmp_sre_roi_app = extract_roi(self._sre_list[resol], [i],
                                                  tmp_sre_roi,
                                                  write_output=False)
                    tmp_l2_image_list.append(
                        tmp_sre_roi_app.getoutput().get("out"))
                    tmp_l2_filename_list.append(tmp_sre_roi)
                    if l_RedBandId == i:
                        self._qckl_red_image = tmp_sre_roi
                    elif l_GreenBandId == i:
                        self._qckl_green_image = tmp_sre_roi
                    elif l_BlueBandId == i:
                        self._qckl_blue_image = tmp_sre_roi
                    sre_pipeline.add_otb_app(tmp_sre_roi_app)
                # Scaled int16 SRE band written to the public product
                tmp_sre_scaled_roi = os.path.join(
                    working_dir,
                    "tmp_sre_scaled_roi_" + l_ListOfBand[i] + ".tif")
                tmp_sre_scaled_roi_app = extract_roi(
                    scaled_sre_app.getoutput().get("out"), [i],
                    tmp_sre_scaled_roi + ":int16", write_output=False)
                tmp_l2_image_list.append(
                    tmp_sre_scaled_roi_app.getoutput().get("out"))
                sre_pipeline.add_otb_app(tmp_sre_scaled_roi_app)
                tmp_l2_filename_list.append(
                    l_BaseL2FullFilename + "_SRE_" + l_ListOfBand[i] + ".tif" +
                    file_utils.get_extended_filename_write_image_file_standard())
            # START WRITING FRE Image file DATA (envelope correction only)
            tmp_tgs_filename = os.path.join(
                working_dir, "tmp_tgs_" + l_StrResolution + ".tif")
            tmp_stl_filename = os.path.join(
                working_dir, "tmp_stl_" + l_StrResolution + ".tif")
            fre_pipeline = OtbPipelineManager()
            if p_EnvCorOption:
                # Initialize the scalar (quantification) filter
                tmp_fre_scaled = os.path.join(
                    working_dir,
                    "tmp_fre_multi_round_" + l_StrResolution + ".tif")
                param_scaled_fre = {
                    "im": self._fre_list[resol],
                    "coef": p_ReflectanceQuantificationValue,
                    "out": tmp_fre_scaled
                }
                scaled_fre_app = OtbAppHandler("RoundImage", param_scaled_fre,
                                               write_output=False)
                fre_pipeline.add_otb_app(scaled_fre_app)
                # Extract each channel for each file
                for i in range(l_NumberOfBands):
                    tmp_fre_roi = os.path.join(
                        working_dir, "tmp_fre_roi_" + l_ListOfBand[i] + ".tif")
                    tmp_fre_roi_app = extract_roi(
                        scaled_fre_app.getoutput().get("out"), [i],
                        tmp_fre_roi + ":int16", write_output=False)
                    tmp_l2_image_list.append(
                        tmp_fre_roi_app.getoutput().get("out"))
                    fre_pipeline.add_otb_app(tmp_fre_roi_app)
                    tmp_l2_filename_list.append(
                        l_BaseL2FullFilename + "_FRE_" + l_ListOfBand[i] +
                        ".tif" +
                        file_utils.get_extended_filename_write_image_file_standard())
                # Add tgs and stl also provided by the envcorr
                tmp_l2_image_list.append(self._tgs_list[resol])
                tmp_l2_filename_list.append(tmp_tgs_filename)
                tmp_l2_image_list.append(self._stl_list[resol])
                tmp_l2_filename_list.append(tmp_stl_filename)
            # START WRITING ATB Image file DATA (VAP band + AOT band)
            # FA1424: Temporary Fix to address cosmetic aspects of FA1424
            # (historical thresholding of VAP before scaling, see C++ origin)
            atb_pipeline = OtbPipelineManager()
            tmp_vap = os.path.join(
                working_dir, "tmp_vap_scaled_" + l_StrResolution + ".tif")
            # Edge pixels (im2b1 == 1) get the nodata value, others are scaled
            param_bandmath_vap = {
                "il": [self._l2vapimagelist[resol], self._l2edgimagelist[resol]],
                "exp":
                "(im2b1 == 1)?" + str(p_VAPNodataValue) + ":" + "im1b1*" +
                str(p_VAPQuantificationValue),
                "out": tmp_vap
            }
            vap_scal_app = OtbAppHandler("BandMathDouble", param_bandmath_vap,
                                         write_output=False)
            atb_pipeline.add_otb_app(vap_scal_app)
            tmp_round_vap = os.path.join(
                working_dir, "tmp_vap_round_" + l_StrResolution + ".tif")
            param_round_vap = {
                "im": vap_scal_app.getoutput().get("out"),
                "out": tmp_round_vap
            }
            vap_round_app = OtbAppHandler("RoundImage", param_round_vap,
                                          write_output=False)
            atb_pipeline.add_otb_app(vap_round_app)
            tmp_aot = os.path.join(
                working_dir, "tmp_aot_scaled_" + l_StrResolution + ".tif")
            param_bandmath_aot = {
                "il": [self._l2aotlist[resol], self._l2edgimagelist[resol]],
                "exp":
                "(im2b1 == 1)?" + str(p_AOTNodataValue) + ":" + "im1b1*" +
                str(p_AOTQuantificationValue),
                "out": tmp_aot
            }
            aot_scal_app = OtbAppHandler("BandMathDouble", param_bandmath_aot,
                                         write_output=False)
            atb_pipeline.add_otb_app(aot_scal_app)
            tmp_round_aot = os.path.join(
                working_dir, "tmp_aot_round_" + l_StrResolution + ".tif")
            param_round_aot = {
                "im": aot_scal_app.getoutput().get("out"),
                "out": tmp_round_aot
            }
            aot_round_app = OtbAppHandler("RoundImage", param_round_aot,
                                          write_output=False)
            atb_pipeline.add_otb_app(aot_round_app)
            atb_filename = l_BaseL2FullFilename + "_ATB_" + l_grpSuffix + ".tif"
            param_atb_concat = {
                "il": [
                    vap_round_app.getoutput().get("out"),
                    aot_round_app.getoutput().get("out")
                ],
                "out":
                atb_filename + ":uint8" +
                file_utils.get_extended_filename_write_image_file_standard()
            }
            atb_binconcat_app = OtbAppHandler("ConcatenateImages",
                                              param_atb_concat,
                                              write_output=False)
            atb_pipeline.add_otb_app(atb_binconcat_app)
            # START WRITING IAB MASK (water-vapour + AOT interpolation masks)
            iab_pipeline = OtbPipelineManager()
            # Create the image list
            tmp_iab = os.path.join(
                working_dir, "tmp_iab_concat_" + l_StrResolution + ".tif")
            param_iab_concat = {
                "il": [self._l2iwcmasklist[resol], self._l2taomasklist[resol]],
                "out": tmp_iab + ":uint8"
            }
            tmp_iab_concat_app = OtbAppHandler("ConcatenateImages",
                                               param_iab_concat,
                                               write_output=False)
            iab_pipeline.add_otb_app(tmp_iab_concat_app)
            iab_filename = l_BaseL2FullMASKSFilename + "_IAB_" + l_grpSuffix + ".tif"
            param_iab_binconcat = {
                "im": tmp_iab_concat_app.getoutput().get("out"),
                "out":
                iab_filename + ":uint8" +
                file_utils.get_extended_filename_write_image_file_standard()
            }
            iab_binconcat_app = OtbAppHandler("BinaryConcatenate",
                                              param_iab_binconcat,
                                              write_output=False)
            iab_pipeline.add_otb_app(iab_binconcat_app)
            # ATB and IAB are written here, separately from the SRE/FRE batch
            write_images([
                atb_binconcat_app.getoutput().get("out"),
                iab_binconcat_app.getoutput().get("out")
            ], [atb_filename, iab_filename])
            # START WRITING EDG Image file DATA
            tmp_l2_image_list.append(self._l2edgimagelist[resol])
            tmp_l2_filename_list.append(l_BaseL2FullMASKSFilename + "_EDG_" +
                                        l_grpSuffix + ".tif")
            # --------------------------
            # Write all the images at L2 Reso
            write_images(tmp_l2_image_list, tmp_l2_filename_list)
            # Release the OTB applications now that outputs are on disk
            atb_pipeline.free_otb_app()
            iab_pipeline.free_otb_app()
            fre_pipeline.free_otb_app()
            sre_pipeline.free_otb_app()
            # --------------------------
            # Caching of cloud images needed for MG2
            l_cm2_index = p_CLDCoreAlgorithmsMapBand[CLOUD_MASK_ALL_CLOUDS]
            l_shadows_index = p_CLDCoreAlgorithmsMapBand[CLOUD_MASK_SHADOWS]
            l_shadvar_index = p_CLDCoreAlgorithmsMapBand[CLOUD_MASK_SHADVAR]
            # START WRITING MG2 Image file DATA
            l_mg2_image_list = []
            mg2_pipeline = OtbPipelineManager()
            # Connect the WAM image (water mask resampled to this resolution)
            wat_resampled = os.path.join(
                working_dir, "wat_resampled_" + l_StrResolution + ".tif")
            app_resample_wat = resample(self._wasimage,
                                        self._dtm.ALTList[resol],
                                        wat_resampled,
                                        method=OtbResampleType.LINEAR,
                                        threshold=0.25,
                                        write_output=False)
            l_mg2_image_list.append(app_resample_wat.getoutput().get("out"))
            mg2_pipeline.add_otb_app(app_resample_wat)
            # Extract the CM2 cloud mask
            l_mg2_image_list.append(self._l2cldlist[resol][l_cm2_index])
            # Connect the SNW image if any
            if self._cld_snow is not None:
                LOGGER.debug(
                    "Snow mask has been successfully computed, adding it to the MG2"
                )
                snw_resampled = os.path.join(
                    working_dir, "snw_resampled_" + l_StrResolution + ".tif")
                app_resample_snw = resample(self._cld_snow,
                                            self._dtm.ALTList[resol],
                                            snw_resampled,
                                            method=OtbResampleType.LINEAR,
                                            threshold=0.25,
                                            write_output=False)
                l_mg2_image_list.append(
                    app_resample_snw.getoutput().get("out"))
                mg2_pipeline.add_otb_app(app_resample_snw)
            else:
                # Add a constant (all-zero) mask when no snow mask exists
                tmp_constant_filename = os.path.join(working_dir,
                                                     "Const_shd_masks.tif")
                cst_snw_app = constant_image(self._dtm.ALTList[resol], 0,
                                             tmp_constant_filename + ":uint8",
                                             write_output=False)
                l_mg2_image_list.append(cst_snw_app.getoutput().get("out"))
                mg2_pipeline.add_otb_app(cst_snw_app)
            # Connect the shadow or mask (shadows OR shadvar)
            tmp_shador_bandmath = os.path.join(
                working_dir,
                "tmp_shador_bandmath_" + l_StrResolution + ".tif")
            tmp_band_math_app = band_math([
                self._l2cldlist[resol][l_shadows_index],
                self._l2cldlist[resol][l_shadvar_index]
            ], "im1b1 || im2b1", tmp_shador_bandmath + ":uint8",
                                          write_output=False)
            l_mg2_image_list.append(tmp_band_math_app.getoutput().get("out"))
            mg2_pipeline.add_otb_app(tmp_band_math_app)
            # Connect the HID image (hidden surfaces, resampled DTM product)
            hid_resampled = os.path.join(
                working_dir, "hid_resampled_" + l_StrResolution + ".tif")
            app_resample_hid = resample(self._dtm_hid,
                                        self._dtm.ALTList[resol],
                                        hid_resampled,
                                        method=OtbResampleType.LINEAR,
                                        threshold=0.25,
                                        write_output=False)
            l_mg2_image_list.append(app_resample_hid.getoutput().get("out"))
            mg2_pipeline.add_otb_app(app_resample_hid)
            # Connect the SHD image (topographic shadows)
            shd_resampled = os.path.join(
                working_dir, "shd_resampled_" + l_StrResolution + ".tif")
            app_resample_shd = resample(self._dtm_shd,
                                        self._dtm.ALTList[resol],
                                        shd_resampled,
                                        method=OtbResampleType.LINEAR,
                                        threshold=0.25,
                                        write_output=False)
            mg2_pipeline.add_otb_app(app_resample_shd)
            l_mg2_image_list.append(app_resample_shd.getoutput().get("out"))
            # STL/TGS come from the envcorr outputs when enabled, else from
            # the member lists
            if p_EnvCorOption:
                # Append STL
                l_mg2_image_list.append(tmp_stl_filename)
                # Append TGS
                l_mg2_image_list.append(tmp_tgs_filename)
            else:
                # Append STL
                l_mg2_image_list.append(self._stl_list[resol])
                # Append TGS
                l_mg2_image_list.append(self._tgs_list[resol])
            # Concatenate all planes, then pack to one bit-encoded uint8 file
            tmp_mg2 = os.path.join(
                working_dir, "tmp_mg2_concat_" + l_StrResolution + ".tif")
            param_mg2_concat = {"il": l_mg2_image_list, "out": tmp_mg2}
            tmp_mg2_concat_app = OtbAppHandler("ConcatenateImages",
                                               param_mg2_concat,
                                               write_output=False)
            param_mg2_binconcat = {
                "im": tmp_mg2_concat_app.getoutput().get("out"),
                "out":
                l_BaseL2FullMASKSFilename + "_MG2_" + l_grpSuffix + ".tif" +
                ":uint8" +
                file_utils.get_extended_filename_write_image_file_standard()
            }
            mg2_binconcat_app = OtbAppHandler("BinaryConcatenate",
                                              param_mg2_binconcat,
                                              write_output=True)
            mg2_pipeline.add_otb_app(mg2_binconcat_app)
            mg2_pipeline.free_otb_app()
            # START WRITING SAT Image file DATA
            # TODO Create the writer with test on number of bands
            param_sat_binconcat = {
                "im": self._l2satimagelist[resol],
                "out":
                l_BaseL2FullMASKSFilename + "_SAT_" + l_grpSuffix + ".tif" +
                ":uint8" +
                file_utils.get_extended_filename_write_image_file_standard()
            }
            sat_binconcat_app = OtbAppHandler("BinaryConcatenate",
                                              param_sat_binconcat,
                                              write_output=True)
            # START WRITING PIX Image file DATA (aberrant pixels, copied as-is)
            if "PIXImages" in self._l1_image_info.MuscateData:
                LOGGER.debug(
                    "The L1 product have 'Aberrant_Pixels' masks. There are writed in the L2 out product..."
                )
                otb_file_utils.otb_copy_image_to_file(
                    self._l2piximagelist[resol],
                    l_BaseL2FullMASKSFilename + "_PIX_" + l_grpSuffix + ".tif")
            else:
                LOGGER.debug("No PIX node detected to write")
            # START WRITING USI Image file DATA
            if "Node_Useful_Image" in self._l1_image_info.MuscateData:
                LOGGER.debug(
                    "The L1 product have 'Useful_Image' files. There are copied in the L2 out product..."
                )
                # Write the USI in the Xml file, in the Useful_Image node !
                l_XPathRootUSI_In = "//Mask[Mask_Properties/NATURE='Useful_Image']/Mask_File_List/MASK_FILE[@group_id='{}']"
                xnodes_in = xml_tools.get_all_values(
                    self._l1_image_info.MuscateData["Node_Useful_Image"],
                    l_XPathRootUSI_In.format(l_grpSuffix))
                # Get the path in the xml product filename
                lPath = os.path.dirname(self._l1_image_info.HeaderFilename)
                for node_in in xnodes_in:
                    l_FullPathFilename = os.path.join(lPath, node_in.text)
                    # Expand threshold the file
                    usi_resampled = l_BaseL2FullMASKSFilename + "_USI_" + l_grpSuffix + ".tif"\
                        + file_utils.get_extended_filename_write_mask_file_muscate()
                    resample(l_FullPathFilename,
                             self._dtm.ALTList[resol],
                             usi_resampled,
                             threshold=0.25,
                             method=OtbResampleType.LINEAR,
                             write_output=True)
            else:
                LOGGER.debug(
                    "No 'Useful_Image' mask detected in the L1 product.")
            # Fin si manage USI
            # START WRITING DFP Image file DATA (=DFP in MUSCATE)
            # TODO
            if self._l2dfpimagelist is not None:
                param_dfp_binconcat = {
                    "im": self._l2dfpimagelist[resol],
                    "out":
                    l_BaseL2FullMASKSFilename + "_DFP_" + l_grpSuffix + ".tif"
                }
                dfp_binconcat_app = OtbAppHandler("BinaryConcatenate",
                                                  param_dfp_binconcat,
                                                  write_output=True)
            else:
                LOGGER.debug("DFP Masks not available.")
            # START WRITING CLM (CLD) Image file DATA
            # Connect the CLD image
            # -------------------------------------
            # Detect whether the cloud list holds filenames (vs in-memory
            # swig pointers) to choose the write path
            l_cld_uses_filenames = False
            for f in self._l2cldlist[resol]:
                if not otb_is_swig_pointer(f) and os.path.exists(f):
                    l_cld_uses_filenames = True
            self.write_cld_image(self._l2cldlist[resol],
                                 p_CLDDataBandsSelected,
                                 l_BaseL2FullMASKSFilename + "_CLM_" +
                                 l_grpSuffix + ".tif",
                                 use_filenames=l_cld_uses_filenames)
            LOGGER.debug("Writing L2 resolution image done !")
def write(self):
    """Write the Sentinel-2 L2 product headers (EarthExplorer XML files).

    Creates the public/private directory layout, optionally the public and
    private (others/TEC/STO/reflectances/LTC) EarthExplorer headers, copies
    the L1C detector-footprint (DETFOO) files into MSK_DETFOO, and writes
    the global product header with file type SSC_L2VALD or SSC_L2NOTV
    depending on product validity.

    :raises MajaException: if ``self.plugin`` is None
    """
    # NOTE(review): this method was reconstructed from a whitespace-mangled
    # source; the nesting of the header-writing calls (which ones fall under
    # enablewriteearthexplorerheaders vs. the outer writel2products/backward
    # guard) must be verified against upstream MAJA.
    # 4.3
    self.preprocessing()
    if self.plugin is None:
        raise MajaException(
            "Internal error: the variable m_PluginBasePointer is NULL!")
    # Replace L2VALD by L2NOTV when the product is invalid
    # Note: done in the L2Processor
    lCurrentDate = datetime.datetime.now().strftime(
        EarthExplorerXMLFileHandler.XML_FORMAT_DATE)
    l_FileType = "SSC_L2VALD"
    if not self.productisvalid:
        # TODO
        # self.l2imagefilenamesprovider.InvalidateTheFilenames()
        l_FileType = "SSC_L2NOTV"
    file_utils.create_directory(
        self.l2imagefilenamesprovider.get_public_directory())
    file_utils.create_directory(
        self.l2imagefilenamesprovider.get_private_filenames().get_private_directory())
    # ---------------------------------------------------------------------
    # Write LTC Private Header -- necessary for Backward mode
    # ---------------------------------------------------------------------
    if self.writeltc:
        # Create the LTC directory if it doesn't exist
        file_utils.create_directory(
            self.l2imagefilenamesprovider.get_private_filenames().get_ltc_image_dir_filename())
    # TODO else: remove the LTC directory and header (see original C++:
    # System::RemoveDirectory / RemoveFile on the LTC dir and header)
    # Create the .DBL file (zero)
    file_utils.touch_file(self.l2imagefilenamesprovider.get_dbl_filename())
    LOGGER.debug("Start Sentinel2L2HeaderFileWriterProvider::Write() ...")
    # Get the "share/config" directory
    root_template_directory = self.apphandler.get_share_config_directory()
    LOGGER.debug("Root template install directory '" + root_template_directory + "'")
    root_shemas_directory = self.apphandler.get_schemas_root_install_dir()
    LOGGER.debug("Root shemas install directory '" + root_shemas_directory + "'")
    lSchemaLocationDirectory = root_shemas_directory + self.plugin.MAJA_INSTALL_SCHEMAS_DIR
    LOGGER.debug("Root SENTIENL2 install directory '" + lSchemaLocationDirectory + "'")
    # Read the main header
    l1_input_handler = MajaSentinel2L1MainXmlReader(
        self.l1imageinformationsproviderbase.ProductFileName,
        tile_id=self.apphandler.get_tile_id())
    # Get the xml tile header filename
    tileFilename = self.l1imageinformationsproviderbase.xmlTileFilename
    LOGGER.info("Tile xml filename: " + tileFilename)
    l1_input_tile_handler = MajaSentinel2L1GranuleXmlReader(tileFilename)
    # Write Headers ?
    # MACCS 4.7.2 - correction pour FA 1572
    if self.writel2products or self.backwardmode:
        if self.enablewriteearthexplorerheaders:
            # -----------------------------------------------------------
            # Writes the PUBLIC headers files
            if self.writel2products:
                self.write_public_xml_handler(
                    self.l2imagefilenamesprovider, self.dem,
                    self.l1imageinformationsproviderbase.UTCValidityStart,
                    self.l1imageinformationsproviderbase.UTCValidityStop,
                    self.l1imageinformationsproviderbase.Satellite,
                    root_template_directory, lSchemaLocationDirectory,
                    lCurrentDate)
            # -----------------------------------------------------------
            # Writes the PRIVATE others headers
            self.write_private_others_xmlhandler(
                self.l2imagefilenamesprovider.get_private_filenames(),
                self.l2imagefilenamesprovider.get_reference_product_header_id(),
                self.l2imagefilenamesprovider.get_reference_product_instance(),
                self.dem,
                self.l1imageinformationsproviderbase.UTCValidityStart,
                self.l1imageinformationsproviderbase.UTCValidityStop,
                self.l1imageinformationsproviderbase.Satellite,
                root_template_directory, lSchemaLocationDirectory,
                lCurrentDate)
            # -----------------------------------------------------------
            # Writes the PRIVATE TEC headers
            self.write_private_tec_xmlhandler(
                self.l2imagefilenamesprovider.get_private_filenames().get_hdr_private_filename(),
                self.l2imagefilenamesprovider.get_reference_product_header_id(),
                self.l1imageinformationsproviderbase.UTCValidityStart,
                self.l1imageinformationsproviderbase.UTCValidityStop,
                self.l1imageinformationsproviderbase.Satellite,
                self.l1imageinformationsproviderbase.Site,
                self.l1imageinformationsproviderbase.ProductDateStr,
                root_template_directory, lSchemaLocationDirectory,
                lCurrentDate)
            # -----------------------------------------------------------
            # Writes the PRIVATE STO header
            self.write_private_sto_xmlhandler(
                self.l2imagefilenamesprovider.get_private_filenames().get_sto_header_filename(),
                self.l2imagefilenamesprovider.get_reference_product_header_id(),
                self.l2imagefilenamesprovider.get_reference_product_instance(),
                self.dem,
                self.l1imageinformationsproviderbase.UTCValidityStart,
                self.l1imageinformationsproviderbase.UTCValidityStop,
                self.l1imageinformationsproviderbase.Satellite,
                root_template_directory, lSchemaLocationDirectory,
                lCurrentDate)
            # -----------------------------------------------------------
            # Writes the PRIVATE reflectances headers (RCR, RTA, RTC)
            self.write_private_reflectances_xmlhandler(
                self.l2imagefilenamesprovider.get_private_filenames(),
                self.l2imagefilenamesprovider.get_reference_product_header_id(),
                self.l2imagefilenamesprovider.get_reference_product_instance(),
                self.dem,
                self.l1imageinformationsproviderbase.UTCValidityStart,
                self.l1imageinformationsproviderbase.UTCValidityStop,
                self.l1imageinformationsproviderbase.Satellite,
                root_template_directory, lSchemaLocationDirectory,
                lCurrentDate)
        # ---------------------------------------------------------------
        # Write LTC Private Header (always, because read in Backward mode)
        # ---------------------------------------------------------------
        if self.writeltc:
            # -----------------------------------------------------------
            # Get solar Angles
            # l_solarAngles = self.l1imageinformationsproviderbase.SolarAngle
            l_solarAngles = [
                float(item) for item in list(
                    self.l1imageinformationsproviderbase.SolarAngle.values())
            ]
            # -----------------------------------------------------------
            # Get viewing Angles: mean zenith first, then mean azimuth,
            # averaged over the per-band L2Coarse values
            l_viewingAngles = []
            l_MeanViewingAzimuthalAngle = self.l1imageinformationsproviderbase.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution
            l_MeanViewingZenithalAngles = self.l1imageinformationsproviderbase.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution
            l_viewingAngles.append(
                statistics.mean(
                    [float(a) for a in l_MeanViewingZenithalAngles]))
            l_viewingAngles.append(
                statistics.mean(
                    [float(a) for a in l_MeanViewingAzimuthalAngle]))
            # -----------------------------------------------------------
            # Write LTC Handler
            self.write_private_ltc_xmlhandler(
                self.inputl2imagefilereader,
                self.l2imagefilenamesprovider.get_reference_product_header_id(),
                self.l2imagefilenamesprovider.get_reference_product_instance(),
                os.path.join(root_template_directory,
                             self.plugin.TEMPLATE_PDTANX_PRIVATE_LTC_HDR),
                self.l2imagefilenamesprovider.get_private_filenames(),
                lSchemaLocationDirectory, lCurrentDate,
                self.l1imageinformationsproviderbase.UTCValidityStart,
                self.l1imageinformationsproviderbase.UTCValidityStop,
                l_solarAngles, l_viewingAngles, False)
    # LAIG-DM-MAC-975-CNES
    # Copy the detector footprint (DETFOO gml files) from the L1C product to
    # the L2 product, in the sub directory "MSK_DETFOO"
    l_ZoneMaskFileNames = l1_input_tile_handler.ListOfDetFootPrintHeaderFileName
    l_FullPathDetfoo_subdir = os.path.join(
        self.l2imagefilenamesprovider.get_public_directory(), "MSK_DETFOO")
    file_utils.create_directory(l_FullPathDetfoo_subdir)
    for f in l_ZoneMaskFileNames:
        file_utils.copy_file_to_directory(f, l_FullPathDetfoo_subdir)
    # Write the global product header (MTD), typed L2VALD or L2NOTV
    self.write_global_xmlhandler(
        os.path.join(root_template_directory, self.plugin.TEMPLATE_GLOBAL_HDR),
        l1_input_handler, l1_input_tile_handler,
        self.l2imagefilenamesprovider.get_hdr_filename(), l_FileType,
        lSchemaLocationDirectory)