def get_list_of_gipp_files(self):
    """Collect every GIPP file entry declared in the header.

    :return: list of dicts, one per GIPP_FILE node, with keys
        "index", "file_location", "nature" and "logical_name".
    """
    gipps = []
    for node in xml_tools.get_all_values(self.root, GIPP_FILE):
        gipps.append(
            {
                "index": xml_tools.get_only_value(node, GIPP_FILE_INDEX),
                "file_location": xml_tools.get_xml_string_value(node, GIPP_FILE_LOCATION),
                "nature": xml_tools.get_xml_string_value(node, GIPP_FILE_NATURE),
                "logical_name": xml_tools.get_xml_string_value(node, GIPP_FILE_LOGICAL_NAME),
            })
    return gipps
def get_l2_mg2_filename(self, resol):
    """Return the absolute path of the L2 water (MG2) mask for *resol*."""
    base_dir = os.path.dirname(self.main_xml_file)
    xpath = ("//Mask_List/Mask[Mask_Properties/NATURE='Water']/Mask_File_List/MASK_FILE[@group_id='"
             + resol + "']")
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def get_l2_atb_filename(self, resol):
    """Return the absolute path of the water-vapor-content (ATB) image for *resol*."""
    base_dir = os.path.dirname(self.main_xml_file)
    xpath = ("//Image[Image_Properties/NATURE='Water_Vapor_Content']/Image_File_List/IMAGE_FILE[@group_id='"
             + resol + "']")
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def get_reference_site_definition_id(self):
    """Read the reference site definition identifier from the header.

    :return: the Reference_SiteDefinition_Id value as a string
    """
    value = xml_tools.get_xml_string_value(self.root, REFERENCE_SITE_DEFINITION_ID)
    return value
def get_vie_image_filename(self, p_det, p_axis):
    """Return the absolute path of the viewing-angles grid DATA_FILE
    for detector *p_det* and axis *p_axis*."""
    base_dir = os.path.dirname(self.main_xml_file)
    xpath = ("//Data_List/Data[Data_Properties/NATURE='Viewing_Angles_Grid']/"
             + "Data_File_List/DATA_FILE[@detector_id='"
             + p_det + "'][@axis='" + p_axis + "']")
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def get_viewing_grid_row_step(self, bandid, det):
    """Return the ROW_STEP of the zenith viewing-incidence-angles grid.

    :param bandid: optional band id; when None, no band predicate is added
    :param det: detector id selecting the grid
    """
    selector = "[@band_id='{}']".format(bandid) if bandid is not None else ""
    template = "//Viewing_Incidence_Angles_Grids_List/Band_Viewing_Incidence_Angles_Grids_List{}/Viewing_Incidence_Angles_Grids[@detector_id='{}']/Zenith/ROW_STEP"
    return xml_tools.get_xml_string_value(self.root, template.format(selector, det))
def get_sol_image_filename(self, p_alt, p_axis):
    """Return the absolute path of the solar-angles grid DATA_FILE
    for altitude *p_alt* (meters) and axis *p_axis*."""
    base_dir = os.path.dirname(self.main_xml_file)
    altitude = str(p_alt) + "m"
    xpath = ("//Data_List/Data[Data_Properties/NATURE='Solar_Angles_Grid']/Data_File_List/DATA_FILE[@altitude='"
             + altitude + "'][@axis='" + p_axis + "']")
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def get_validity_start_date(self):
    """Extract the sensing-start date from //DATASTRIP_ID and return it in UTC form.

    Example id: S2A_OPER_MSI_L1C_DS_MPS__20140915T120000_S20130707T171925_N01.01
    — the date taken is the third '_'-separated field from the end
    (20140915T120000 in the example).
    """
    datastrip_id = xml_tools.get_xml_string_value(self.root, "//DATASTRIP_ID")
    date = datastrip_id.split('_')[-3]
    LOGGER.debug("GetValidityStartDate: " + date)
    parsed = date_utils.get_datetime_from_yyyymmddthhmmss(date)
    return date_utils.get_utc_from_datetime(parsed)
def get_cams_info(self):
    """Read the CAMS information block from the header.

    :return: dict with key "extinction_coeffs" (list of dicts holding
        "Description", "Name" and "Values" per Extinction_Coef node) and,
        when CAMS info is present, "rh_dep" (bool) and "rh_tab"
        (list of strings).
    """
    l_cams_info = dict()
    l_cams_info["extinction_coeffs"] = []
    if self.has_cams_info():
        # Hoist the loop-invariant XPath prefix; the concatenated paths are
        # identical to the ones built inline before.
        sph = "/Earth_Explorer_Header/Variable_Header/Specific_Product_Header"
        coefs_list = sph + "/Extinction_Coefs_List"
        l_count = int(xml_tools.get_attribute(self.root, coefs_list, "count"))
        # Coefficient nodes are indexed from 1 through the @n attribute.
        for n in range(1, l_count + 1):
            coef = coefs_list + "/Extinction_Coef[@n=" + str(n) + "]"
            extcoeffs = {
                "Description": xml_tools.get_xml_string_value(self.root, coef + "/Description"),
                "Name": xml_tools.get_xml_string_value(self.root, coef + "/Name"),
                "Values": xml_tools.as_string_list(
                    xml_tools.get_xml_string_value(self.root, coef + "/Values")),
            }
            l_cams_info["extinction_coeffs"].append(extcoeffs)
        l_cams_info["rh_dep"] = xml_tools.get_xml_bool_value(
            self.root,
            "//Earth_Explorer_Header/Variable_Header/Specific_Product_Header/RH_dep")
        l_cams_info["rh_tab"] = xml_tools.as_string_list(
            xml_tools.get_xml_string_value(
                self.root,
                "//Earth_Explorer_Header/Variable_Header/Specific_Product_Header/RH_Tab"))
    return l_cams_info
def get_list_of_toa_image_filenames(self):
    """Return the absolute TOA image filename for every band, in band order."""
    base_dir = os.path.dirname(self.main_xml_file)
    return [
        os.path.join(
            base_dir,
            xml_tools.get_xml_string_value(
                self.root, "//IMAGE_FILE[@band_id='" + band + "']"))
        for band in self.get_list_of_bands()
    ]
def get_list_of_l2_sat_image_filenames(self, resol):
    """Return the absolute saturation-mask filename for every band of *resol*."""
    base_dir = os.path.dirname(self.main_xml_file)
    filenames = []
    for band in self.get_list_of_band_code_for_resol(resol):
        xpath = ("//Mask_List/Mask[Mask_Properties/NATURE='Saturation']/Mask_File_List/MASK_FILE[@band_id='"
                 + band + "']")
        filenames.append(
            os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath)))
    return filenames
def get_list_of_defective_pixel_image_filenames(self):
    """Return the absolute defective-pixel mask filename for every band."""
    base_dir = os.path.dirname(self.main_xml_file)
    return [
        os.path.join(
            base_dir,
            xml_tools.get_xml_string_value(
                self.root,
                "//Mask_List/Mask[Mask_Properties/NATURE='Defective_Pixel']/"
                + "Mask_File_List/MASK_FILE[@band_id='" + band + "']"))
        for band in self.get_list_of_bands()
    ]
def get_list_of_l1_ndt_image_filenames(self):
    """Return the absolute no-data mask filename for every band."""
    base_dir = os.path.dirname(self.main_xml_file)
    return [
        os.path.join(
            base_dir,
            xml_tools.get_xml_string_value(
                self.root,
                "//Mask_List/Mask[Mask_Properties/NATURE='Nodata']/Mask_File_List/MASK_FILE[@band_id='"
                + band + "']"))
        for band in self.get_list_of_bands()
    ]
def _get_list_of_reflectance_filenames(self, nature, resol):
    """Return the absolute IMAGE_FILE path of the given *nature*
    for every band of resolution *resol*."""
    base_dir = os.path.dirname(self.main_xml_file)
    filenames = []
    for band in self.get_list_of_band_code_for_resol(resol):
        xpath = ("//Image[Image_Properties/NATURE='" + nature
                 + "']/Image_File_List/IMAGE_FILE[@band_id='" + band + "']")
        filenames.append(
            os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath)))
    return filenames
def get_date_pdv_formated_utc(self):
    """Return the acquisition date formatted as 'UTC=YYYY-MM-DDTHH:MM:SS[.mmmZ]'.

    Handles both flavours seen in products (the node read is
    //Product_Characteristics/ACQUISITION_DATE):
      Case1: 2014-12-17T09:36:57.0  -> fraction normalised to .000Z
      Case2: 2014-12-17T09:36:57    -> returned without fractional part
    """
    datepdv = xml_tools.get_xml_string_value(
        self.root, "//Product_Characteristics/ACQUISITION_DATE")
    element = datepdv.split('.')
    msec = ''
    # Guard on a NON-EMPTY fractional part: a trailing '.'
    # (e.g. "…T09:36:57.") would otherwise crash on msec[-1].
    if len(element) == 2 and element[1]:
        msec = element[1]
        if msec[-1] == 'Z':
            msec = msec[:-1]
        # Pad then truncate so the fraction is exactly three digits.
        msec += '000'
        msec = '.' + msec[0:3] + 'Z'
    date = element[0] + msec
    return "UTC=" + date
def get_map_list_of_detector_footprint_image_filenames(self):
    """Return, for each band, a dict mapping zone/detector id to the
    absolute Detector_Footprint mask filename."""
    base_dir = os.path.dirname(self.main_xml_file)
    per_band_maps = []
    for band in self.get_list_of_bands():
        footprint_map = {}
        for zone in self.get_list_of_zones(band):
            xpath = ("//Mask_List/Mask[Mask_Properties/NATURE='Detector_Footprint']/"
                     + "Mask_File_List/MASK_FILE[@band_id='" + band
                     + "' and @detector_id='" + zone + "']")
            footprint_map[zone] = os.path.join(
                base_dir, xml_tools.get_xml_string_value(self.root, xpath))
        per_band_maps.append(footprint_map)
    return per_band_maps
def get_l2_product_ltc_information(self, p_destinationrelativedir, listofltc, listofpft):
    """Read LTC (composite) information from the header into two output lists.

    Appends one LTCType per //List_of_LTC/LTC node to *listofltc* (date plus
    image-centre solar and viewing angles) and one PackagedDBLFileType per
    packaged DBL file to *listofpft*, with each file path re-based onto
    *p_destinationrelativedir*.

    :param p_destinationrelativedir: destination directory used to rebuild
        the relative path of each packaged DBL file
    :param listofltc: in/out list, extended in place with LTCType items
    :param listofpft: in/out list, extended in place with
        PackagedDBLFileType items
    """
    # Number of LTC nodes; @sn indexing below is 1-based.
    l_count = int(
        xml_tools.get_attribute(self.root, "//List_of_LTC", "count"))
    for i in range(l_count):
        ltc = EarthExplorerXMLFileHandler.LTCType()
        ltc.Date = xml_tools.get_xml_string_value(
            self.root, "//List_of_LTC/LTC[@sn=" + str(i + 1) + "]/Date")
        # Solar angles at image centre
        ltc.solarangle.azimuth = xml_tools.get_xml_string_value(
            self.root,
            "//List_of_LTC/LTC[@sn=" + str(i + 1) + "]/Solar_Angles/Image_Center/Azimuth")
        ltc.solarangle.zenith = xml_tools.get_xml_string_value(
            self.root,
            "//List_of_LTC/LTC[@sn=" + str(i + 1) + "]/Solar_Angles/Image_Center/Zenith")
        # Viewing angles at image centre
        ltc.viewingangle.azimuth = xml_tools.get_xml_string_value(
            self.root,
            "//List_of_LTC/LTC[@sn=" + str(i + 1) + "]/Viewing_Angles/Image_Center/Azimuth")
        ltc.viewingangle.zenith = xml_tools.get_xml_string_value(
            self.root,
            "//List_of_LTC/LTC[@sn=" + str(i + 1) + "]/Viewing_Angles/Image_Center/Zenith")
        listofltc.append(ltc)
    # Packaged DBL files declared in the DBL organization section
    l_count = int(
        xml_tools.get_attribute(
            self.root, "//DBL_Organization/List_of_Packaged_DBL_Files", "count"))
    s_count = str(l_count)
    for i in range(l_count):
        pft = EarthExplorerXMLFileHandler.PackagedDBLFileType()
        l_string = str(i + 1)
        l_relativefilepath = xml_tools.get_xml_string_value(
            self.root,
            "//DBL_Organization/List_of_Packaged_DBL_Files[@count=" + s_count +
            "]/Packaged_DBL_File[@sn=" + l_string + "]/Relative_File_Path")
        # Keep only the basename, relocated under the destination dir.
        pft.relativefilepath = os.path.join(
            p_destinationrelativedir, os.path.basename(l_relativefilepath))
        pft.filedefinition = xml_tools.get_xml_string_value(
            self.root,
            "//DBL_Organization/List_of_Packaged_DBL_Files[@count=" + s_count +
            "]/Packaged_DBL_File[@sn=" + l_string + "]/File_Definition")
        listofpft.append(pft)
def get_nick_name(self):
    """Read the site nick name from the header.

    :return: the NICK_NAME value as a string
    """
    value = xml_tools.get_xml_string_value(self.root, NICK_NAME)
    return value
def get_acquisition_date_formated_yyyymmdd(self):
    """Return the acquisition date as a compact 'YYYYMMDD' string.

    Reads //Product_Characteristics/ACQUISITION_DATE, keeps the part
    before 'T' and strips the '-' separators.
    """
    acquisition = xml_tools.get_xml_string_value(
        self.root, "//Product_Characteristics/ACQUISITION_DATE")
    day_part = acquisition.split('T')[0]
    return day_part.replace("-", "")
def get_central_date(self):
    """Read the central date from the header.

    :return: the CENTRAL_DATE value as a string
    """
    value = xml_tools.get_xml_string_value(self.root, CENTRAL_DATE)
    return value
def get_acquisition_orbit_number(self):
    """Read the acquisition orbit number from the header.

    :return: the ACQUI_ORBIT_NUMBER value as a string
    """
    value = xml_tools.get_xml_string_value(self.root, ACQUI_ORBIT_NUMBER)
    return value
def get_acquisition_date(self):
    """Read the acquisition date/time from the header.

    :return: the ACQUISITION_DATE value as a string
    """
    value = xml_tools.get_xml_string_value(self.root, ACQUISITION_DATE)
    return value
def initialize(self, filename, working_dir, has_snow):
    """Initialize the DEM (AUX_REFDE2) filenames provider from a DBL package.

    Uncompresses the .DBL next to *filename*, scans the .DBL.DIR for the
    DEM rasters (ALT/SLP/ASP per resolution, plus ALC/MSK/ASC/SLC),
    rescales SLC/SLP/ASP by self._coeff into *working_dir*, and fills the
    provider state: ALTList, SLPList, ASPList, SLC, CoarseArea, L2Areas,
    ProjRef, ProjCode/ProjType, Site and the ALT_* statistics.

    :param filename: path to the AUX_REFDE2 product (any extension;
        .HDR/.DBL/.DBL.DIR siblings are derived from it)
    :param working_dir: directory receiving the rescaled "Mul_*" rasters
    :param has_snow: not used in this body — TODO confirm whether callers
        rely on it elsewhere
    :raises MajaDataException: on unexpected file extensions, missing DTM
        files, or an unknown DEM projection type
    """
    # Sibling product files derived from the input name
    file_hdr = os.path.splitext(filename)[0] + ".HDR"
    file_dbl = os.path.splitext(filename)[0] + ".DBL"
    file_dbldir = os.path.splitext(filename)[0] + ".DBL.DIR"
    LOGGER.info("AUX_REFDE2 filename: " + filename)
    # Uncompress the DBL archive into the DBL.DIR directory
    uncompress_dbl_product(file_dbl)
    # Count resolutions: one per "*_ALT*.TIF" raster found
    list_of_file = os.listdir(file_dbldir)
    nbresol = 0
    for f in list_of_file:
        if "_ALT" in f and "TIF" in os.path.splitext(f)[1]:
            nbresol = nbresol + 1
    LOGGER.info("Nb resolution found " + str(nbresol))
    self.initialize_res_list(nbresol)
    LOGGER.info(
        "DEMFilenamesProvider::Initialize. Nb resolution computed:" +
        str(len(self._resList)))
    for resol in self._resList:
        LOGGER.debug("DEMFilenamesProvider::Initialize. Prefix resol : " + resol)
    handler = EarthExplorerXMLFileHandler(file_hdr)
    list_of_dbl_files = handler.get_list_of_packaged_dbl_files(True, False)
    LOGGER.info("DEMFileNames found " + str(len(list_of_dbl_files)) + " files")
    for i in range(0, len(list_of_dbl_files)):
        # NOTE(review): rejects any filename with characters after ".TIF"
        # (split('.TIF')[-1] is non-empty) — also true for names without
        # ".TIF" at all; confirm this matches the intended check.
        if list_of_dbl_files[i].split('.TIF')[-1]:
            raise MajaDataException(
                "Wrong file extension detected. Delete the file: " +
                str(list_of_dbl_files[i]))
    # Dispatch each file to its slot from the filename suffix.
    for fi in list_of_dbl_files:
        # LAIG-FA-MAC-1610-CNES examples:
        #   ..._REFDE2_..._0001_SLP.TIF    -> key "SLP" (via "0001_SLP")
        #   ..._REFDE2_..._0001_ALT_R1.TIF -> key "ALT_R1"
        l_splitted = (os.path.splitext(os.path.basename(fi))[0]).split("_")
        l_lenghtlistfilenamename = len(l_splitted)
        # Key is the last token, or the last two joined when available
        l_keytype = l_splitted[-1]
        if l_lenghtlistfilenamename > 2:
            l_keytype = l_splitted[-2] + "_" + l_keytype
        if "ALC" in l_keytype:
            self.ALC = fi
        elif "MSK" in l_keytype:
            self.MSK = fi
        elif "ASC" in l_keytype:
            self.ASC = fi
        elif "SLC" in l_keytype:
            self.__SLCInternal = fi
        else:
            # Resolution-suffixed rasters (e.g. ALT_R1): match per resolution
            for res in self._resList:
                if "SLP" in l_keytype:
                    if res in l_keytype:
                        self.__SLPListInternal.append(fi)
                elif "ALT" in l_keytype:
                    if res in l_keytype:
                        self.ALTList.append(fi)
                elif "ASP" in l_keytype:
                    if res in l_keytype:
                        self.__ASPListInternal.append(fi)
                else:
                    LOGGER.debug(
                        "Unknown Filename and associated product type.")
    # Mandatory single files must exist on disk
    if not os.path.exists(self.ALC):
        raise MajaDataException(
            "The ALC file '" + self.ALC + "' of the DTM doesn't exist !")
    if not os.path.exists(self.MSK):
        raise MajaDataException(
            "The MSK file '" + self.MSK + "' of the DTM doesn't exist !")
    if not os.path.exists(self.__SLCInternal):
        raise MajaDataException(
            "The SLC file '" + self.__SLCInternal + "' of the DTM doesn't exist !")
    else:
        # Rescale SLC by the coefficient and record coarse-grid geometry
        LOGGER.debug("Starting multiply " + self.__SLCInternal + " * " + str(self._coeff))
        self.SLC = os.path.join(
            working_dir, "Mul_" + os.path.basename(self.__SLCInternal))
        self._apps.add_otb_app(
            multiply_by_scalar(self.__SLCInternal, self._coeff, output_image=self.SLC))
        mtdat = GdalDatasetInfo(self.__SLCInternal)
        self.CoarseArea = Area()
        self.CoarseArea.size = mtdat.size
        self.CoarseArea.origin = mtdat.origin
        self.CoarseArea.spacing = mtdat.pixel_size
        LOGGER.debug("Done")
    # Per-resolution rasters: SLP and ASP are rescaled, ALT only checked
    for resol in range(0, len(self._resList)):
        if not os.path.exists(self.__SLPListInternal[resol]):
            raise MajaDataException(
                "One of the SLP file '" + self.__SLPListInternal[resol] +
                "' of the DTM doesn't exist !")
        else:
            LOGGER.debug("Starting multiply " + self.__SLPListInternal[resol] +
                         " * " + str(self._coeff))
            tmp = os.path.join(
                working_dir,
                "Mul_" + os.path.basename(self.__SLPListInternal[resol]))
            slp_mul_app = multiply_by_scalar(
                self.__SLPListInternal[resol], self._coeff,
                output_image=tmp, write_output=False)
            self._apps.add_otb_app(slp_mul_app)
            # L2 geometry and projection come from the SLP raster metadata
            mtdat = GdalDatasetInfo(self.__SLPListInternal[resol])
            l2area = Area()
            l2area.size = mtdat.size
            l2area.origin = mtdat.origin
            l2area.spacing = mtdat.pixel_size
            self.ProjRef = mtdat.dataset.GetProjectionRef()
            self.L2Areas.append(l2area)
            LOGGER.debug("Done")
            self.SLPList.append(slp_mul_app.getoutput().get("out"))
        if not os.path.exists(self.ALTList[resol]):
            raise MajaDataException(
                "One of the ALT file '" + self.ALTList[resol] +
                "' of the DTM doesn't exist !")
        if not os.path.exists(self.__ASPListInternal[resol]):
            raise MajaDataException(
                "One of the ASP file '" + self.__ASPListInternal[resol] +
                "' of the DTM doesn't exist !")
        else:
            LOGGER.debug("Starting multiply " + self.__ASPListInternal[resol] +
                         " * " + str(self._coeff))
            tmp = os.path.join(
                working_dir,
                "Mul_" + os.path.basename(self.__ASPListInternal[resol]))
            asp_mul_app = multiply_by_scalar(
                self.__ASPListInternal[resol], self._coeff,
                output_image=tmp, write_output=False)
            self._apps.add_otb_app(asp_mul_app)
            LOGGER.debug("Done")
            self.ASPList.append(asp_mul_app.getoutput().get("out"))
    LOGGER.debug(nbresol)
    # Projection: prefer the cartographic code, else geographic
    l_cartoCode = xml_tools.get_only_value(
        handler.root,
        "//DEM_Information/Cartographic/Coordinate_Reference_System/Code",
        namespaces=handler.nss, check=True)
    l_geoCode = xml_tools.get_only_value(
        handler.root,
        "//DEM_Information/Geographic/Coordinate_Reference_System/Code",
        namespaces=handler.nss, check=True)
    if l_cartoCode is not None:
        self.ProjCode = l_cartoCode.text
        self.ProjType = "PROJECTED"
    elif l_geoCode is not None:
        self.ProjCode = l_geoCode.text
        self.ProjType = "GEOGRAPHIC"
    else:
        raise MajaDataException("Unknown DEM type")
    LOGGER.debug("DEM Projection Code: " + self.ProjCode)
    LOGGER.debug("DEM Projection Type: " + self.ProjType)
    self.Site = xml_tools.get_xml_string_value(
        handler.root,
        "//Specific_Product_Header/Instance_Id/Applicable_Site_Nick_Name",
        namespaces=handler.nss)
    if nbresol != 0:
        # Altitude statistics from the first (finest) ALT raster
        param_stats = {"im": self.ALTList[0]}  # NOTE(review): unused
        stat_app = stats(self.ALTList[0])
        self.ALT_Mean = stat_app.getoutput().get("mean")
        self.ALT_Max = stat_app.getoutput().get("max")
        self.ALT_Min = stat_app.getoutput().get("min")
        self.ALT_Stdv = stat_app.getoutput().get("stdv")
        self.ALT_LogicalName = "LOCAL=" + os.path.splitext(
            os.path.basename(file_hdr))[0]
        LOGGER.info("DEM Mean : " + str(self.ALT_Mean))
        LOGGER.info("DEM Max : " + str(self.ALT_Max))
        LOGGER.info("DEM Min : " + str(self.ALT_Min))
        LOGGER.info("DEM Stdv : " + str(self.ALT_Stdv))
def get_useful_image_infos_filename(self):
    """Return the absolute path of the useful-image-informations DATA_FILE."""
    base_dir = os.path.dirname(self.main_xml_file)
    xpath = "//Data_List/Data[Data_Properties/NATURE='Useful_Image_Informations_File']/Data_File_List/DATA_FILE"
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def initialize(self, product_filename, validate=False, schema_path=None):
    """Initialize this Venus L1 product description from *product_filename*.

    Loads the sibling .HDR file and fills identification fields
    (Satellite, FileCategory, LevelType, dates, orbit), filename
    providers, per-detector viewing angles, solar/viewing angle grids,
    product geometry (AreaByResolution) and spectral information.

    :param product_filename: path to the L1 product; the .HDR sibling
        is derived from it
    :param validate: forwarded to the filenames provider (XML validation)
    :param schema_path: forwarded to the filenames provider
    :return: True on success, False when the HDR file cannot be loaded
    """
    LOGGER.info("Start Venus L1 Initialize on product " + product_filename)
    l_hdrfilename = os.path.splitext(product_filename)[0] + ".HDR"
    l_CanLoad = xml_tools.can_load_file(l_hdrfilename)
    if not l_CanLoad:
        return False
    rootNode = xml_tools.get_root_xml(l_hdrfilename, deannotate=True)
    self.Satellite = xml_tools.get_xml_string_value(rootNode, "//Mission")
    # NOTE(review): a satellite mismatch only logs here — it does not
    # return False; confirm whether initialization should abort instead.
    if not self._plugin.is_valid_with_satellite(self.Satellite):
        LOGGER.debug("The L1 product '" + product_filename +
                     "' with satellite '" + self.Satellite +
                     "' is not a VENUS product !")
    self.SatelliteID = self.Satellite.upper()
    self.PluginName = self._plugin.PluginName
    self.Prefix = "VE"
    # File_Type is e.g. "<category>_<level>..."; split on '_'
    l_File_Type = xml_tools.get_xml_string_value(rootNode, "//File_Type")
    filenamekey = l_File_Type.split("_")
    self.FileCategory = filenamekey[0]
    self.LevelType = filenamekey[1]
    self.FileClass = xml_tools.get_xml_string_value(rootNode, "//File_Class")
    self.Site = xml_tools.get_xml_string_value(rootNode, "//Instance_Id/Nick_Name")
    self.ReferenceSiteDefinitionId = xml_tools.get_xml_string_value(
        rootNode, "//Reference_SiteDefinition_Id")
    l_AcquisitionDateTime = xml_tools.get_xml_string_value(
        rootNode, "//Product_Information/Acquisition_Date_Time")
    self.ProductDate = date_utils.get_datetime_from_utc(l_AcquisitionDateTime)
    self.ProductDateStr = self.ProductDate.strftime('%Y%m%d')
    LOGGER.debug("Product Date: " + self.ProductDateStr)
    self.ProductId = xml_tools.get_xml_string_value(rootNode, "//Fixed_Header/File_Name")
    genDate = xml_tools.get_xml_string_value(
        rootNode, "//Processing_Information/Date_Time")
    # Drop the leading 4 chars (presumably a "UTC=" prefix — TODO confirm)
    # and make sure the generation date ends with 'Z'.
    genDate = genDate[4:]
    if genDate[-1] != 'Z':
        genDate = genDate + 'Z'
    self.GenerationDateStr = genDate
    self.AcquisitionStart = l_AcquisitionDateTime[4:]
    self.OrbitNumber = xml_tools.get_xml_string_value(
        rootNode, "//Product_Information/Acquisition_Orbit_Number")
    self.SpectralContent = "XS"
    self.FilenamesProvider.initialize(
        l_hdrfilename, validate=validate, schema_path=schema_path)
    self.HeaderFilename = self.FilenamesProvider.m_hdrfilename
    self.SOLImageFileName = self.FilenamesProvider.m_SOLImageFileName
    self.SOLHeaderFileName = self.FilenamesProvider.m_SOLHeaderFileName
    self.VIEImageFileName = self.FilenamesProvider.m_VIEImageFileName
    self.VIEHeaderFileName = self.FilenamesProvider.m_VIEHeaderFileName
    self.HeaderHandler = VenusL1HeaderImageEarthExplorerXMLFileHandler(l_hdrfilename)
    # Estimation of the coordinate of the central point
    self.CenterCorner.longitude = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_long()
    self.CenterCorner.latitude = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_lat()
    self.CenterCorner.column = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_column()
    self.CenterCorner.line = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_line()
    # Validity start/stop are both set to the acquisition date/time
    self.UTCValidityStart = l_AcquisitionDateTime
    self.UTCValidityStop = l_AcquisitionDateTime
    # Per-detector viewing angles (zenith/azimuth) and their mean
    self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution = []
    l_meanViewingZenith = 0.0
    l_meanViewingAzimuth = 0.0
    l_count = 0.0
    l_BandsDefinitions = self._plugin.BandsDefinitions
    for det in l_BandsDefinitions.DetectorMap:
        l_Zenith = self.HeaderHandler.get_useful_image_center_view_angle_zenith(det)
        l_Azimuth = self.HeaderHandler.get_useful_image_center_view_angle_azimuth(det)
        l_meanViewingZenith += l_Zenith
        l_meanViewingAzimuth += l_Azimuth
        l_count += 1.0
        self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(str(l_Zenith))
        self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(str(l_Azimuth))
        self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(
            {
                "incidence_zenith_angle": str(l_Zenith),
                "incidence_azimuth_angle": str(l_Azimuth)
            })
    self.ViewingAngle = {
        "incidence_zenith_angle": str(l_meanViewingZenith / l_count),
        "incidence_azimuth_angle": str(l_meanViewingAzimuth / l_count)
    }
    # Fill the L2 resolution angles (same list as coarse resolution)
    self.ListOfViewingAnglesPerBandAtL2Resolution = self.ListOfViewingAnglesPerBandAtL2CoarseResolution
    # Solar angles at image centre
    self.SolarAngle = {
        "sun_zenith_angle": self.HeaderHandler.get_useful_image_image_center_solar_angle_zenith(),
        "sun_azimuth_angle": self.HeaderHandler.get_useful_image_image_center_solar_angle_azimuth()
    }
    # Detect pixel size and product size
    originX = xml_tools.get_xml_float_value(
        rootNode,
        "//Geo_Referencing_Information/Product_Coverage/Cartographic/Upper_Left_Corner/X")
    originY = xml_tools.get_xml_float_value(
        rootNode,
        "//Geo_Referencing_Information/Product_Coverage/Cartographic/Upper_Left_Corner/Y")
    pixSizeX = xml_tools.get_xml_float_value(rootNode, "//Product_Sampling/By_Column")
    pixSizeY = xml_tools.get_xml_float_value(rootNode, "//Product_Sampling/By_Line")
    nbCol = xml_tools.get_xml_int_value(rootNode, "//Image_Information/Size/Columns")
    nbRow = xml_tools.get_xml_int_value(rootNode, "//Image_Information/Size/Lines")
    # Angle grids have one sample per corner: step spans the full image
    gridColStep = (nbCol - 1.0) * pixSizeX
    gridRowStep = (nbRow - 1.0) * pixSizeY
    gridColStepStr = f"{gridColStep:.1f}"
    gridRowStepStr = f"{gridRowStep:.1f}"
    # Solar angle grid from the four corner values
    ula = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Left_Corner/Azimuth")
    ulz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Left_Corner/Zenith")
    ura = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Right_Corner/Azimuth")
    urz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Right_Corner/Zenith")
    lla = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Left_Corner/Azimuth")
    llz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Left_Corner/Zenith")
    lra = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Right_Corner/Azimuth")
    lrz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Right_Corner/Zenith")
    self.SolarAngleGrid["StepUnit"] = "m"
    self.SolarAngleGrid["ColStep"] = gridColStepStr
    self.SolarAngleGrid["RowStep"] = gridRowStepStr
    # 2x2 grid: rows are space-separated corner values
    self.SolarAngleGrid["Azimuth"] = [ula+' '+ura, lla+' '+lra]
    self.SolarAngleGrid["Zenith"] = [ulz+' '+urz, llz+' '+lrz]
    # Viewing angle grids, one per detector (@sn 1..4)
    detectors = [1,2,3,4]
    self.ViewingAngleGrids = []
    l_pathView = "//List_of_Viewing_Angles/Viewing_Angles[@sn='{}']/Product/{}"
    for det in detectors:
        ula = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Upper_Left_Corner/Azimuth"))
        ulz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Upper_Left_Corner/Zenith"))
        ura = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Upper_Right_Corner/Azimuth"))
        urz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Upper_Right_Corner/Zenith"))
        lla = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Lower_Left_Corner/Azimuth"))
        llz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Lower_Left_Corner/Zenith"))
        lra = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Lower_Right_Corner/Azimuth"))
        lrz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det,"Lower_Right_Corner/Zenith"))
        self.ViewingAngleGrids.append({
            "StepUnit":"m",
            "ColStep":gridColStepStr,
            "RowStep":gridRowStepStr,
            "Detector":str(det),
            "Azimuth":[ula+' '+ura, lla+' '+lra],
            "Zenith":[ulz+' '+urz, llz+' '+lrz]
        })
    # Set Area by resolution (single resolution for Venus L1)
    curArea = Area()
    curArea.origin = (
        f"{originX:.1f}",
        f"{originY:.1f}")
    curArea.spacing = (
        f"{pixSizeX:.1f}",
        f"{-pixSizeY:.1f}")
    curArea.size = (
        str(nbCol),
        str(nbRow))
    self.AreaByResolution = [curArea]
    # Gather spectral information (Ak, polarization, central wavelength)
    l_resol = l_BandsDefinitions.ListOfL1Resolution[0]
    l_pathAk = "//List_of_Aks/Ak[@sk='{}']"
    l_pathPolarCoef = "//List_of_Polarization_Coefficients/Polarization_Coefficient[@sk='{}']"
    l_pathWavelenghCentral = "//List_of_Band_Central_Wavelength/Band_Central_Wavelength[@sk='{}']"
    self.SpectralInfo = []
    for b, bidx in l_BandsDefinitions.L1BandMap.items():
        bcode = l_BandsDefinitions.L1ListOfBandsMap[l_resol][bidx]
        self.SpectralInfo.append({
            "Band":b.replace("B0", "B"),
            "Ak":xml_tools.get_xml_string_value(rootNode,l_pathAk.format(bcode)) ,
            "PolarizationCoefficient":xml_tools.get_xml_string_value(rootNode,l_pathPolarCoef.format(bcode)),
            "WavelengthCentral": xml_tools.get_xml_string_value(rootNode,l_pathWavelenghCentral.format(bcode))
        })
    # 4.2: New
    # Set the L1 no data value
    self.L1NoData = self.HeaderHandler.get_no_data_value_as_int()
    # Set the reflectance quantification value
    self.ReflectanceQuantification = self.HeaderHandler.get_reflectance_quantification_value()
    # Computes the real value of the L1 NoData
    self.RealL1NoData = self.L1NoData * self.ReflectanceQuantification
    return True
def get_cla_image_filename(self):
    """Return the absolute path of the cloud-altitude grid DATA_FILE."""
    base_dir = os.path.dirname(self.main_xml_file)
    xpath = "//Data_List/Data[Data_Properties/NATURE='Cloud_Altitude_Grid']/Data_File_List/DATA_FILE"
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def get_designated_fill_mask_filename(self):
    """Return the absolute path of the designated-fill MASK_FILE."""
    base_dir = os.path.dirname(self.main_xml_file)
    xpath = "//Mask_List/Mask[Mask_Properties/NATURE='Designated_Fill']/Mask_File_List/MASK_FILE"
    return os.path.join(base_dir, xml_tools.get_xml_string_value(self.root, xpath))
def muscate_initialize(self, product_filename, plugin_base, validate=False, schema_path=None):
    """Initialize this L1 image-information object from a MUSCATE L1 XML header.

    Reads product identification, acquisition dates, viewing/solar angle
    grids, per-resolution geoposition areas, spectral band information and
    assorted metadata nodes from the header, storing them on ``self``.

    :param product_filename: path to the candidate L1 XML header file
    :param plugin_base: plugin descriptor; this method reads its PluginName,
        ShortFileType, BandsDefinitions and WideFieldSensor attributes
    :param validate: if True, the XML handler validates against a schema
    :param schema_path: optional schema location forwarded to the XML handler
    :return: True on success; False when the file is not a MUSCATE L1 header
    """
    # Initialize the Image filename provider
    # Reject early if the filename pattern is not MUSCATE.
    if not MajaMuscateL1ImageInformations.is_a_muscate_by_checking_the_filename(
            product_filename):
        LOGGER.debug("The filename <" + product_filename + "> is not an 'muscate' L1 header file.")
        return False
    # Second check: the platform declared inside the XML must match the plugin.
    if not MajaMuscateL1ImageInformations.is_a_muscate_by_checking_the_satellite(
            product_filename, plugin_base):
        LOGGER.debug(
            "The filename <" + product_filename +
            "> is not an 'muscate' L1 header file (by reading platform in the xml file)."
        )
        return False
    # Init XML handler
    lHandler = MuscateXMLFileHandler(product_filename, validate=validate, schema_path=schema_path)
    # Store the satellite
    self.Satellite = lHandler.get_string_value_of("Platform")
    # Satellite ID: upper-cased platform with dashes removed (e.g. SENTINEL2A).
    self.SatelliteID = self.Satellite.upper().replace("-", "")
    # Store the plugin name
    self.PluginName = plugin_base.PluginName
    self.ProductFileName = product_filename
    self.FileCategory = plugin_base.ShortFileType  # LSC
    self.LevelType = "L1VALD"
    self.Prefix = self.Satellite
    self.FileClass = "TEST"
    # LANDSAT5-TM-XSTH...
    self.Site = lHandler.get_string_value_of("ZoneGeo")
    self.ProductDateStr = lHandler.get_acquisition_date_formated_yyyymmdd()  # YYYYMMDD
    # LANDSAT5-TM-XSTH_20100118-103000-000_L1C_EU93066200A00B_C_V1-0
    self.ProductId = lHandler.get_string_value_of("ProductId")
    self.ProductVersion = lHandler.get_string_value_of("ProductVersion")
    l_DatePDV = lHandler.get_date_pdv_formated_utc()  # UTC=2010-01-18T12:00:00
    self.ProductDate = date_utils.get_datetime_from_utc(l_DatePDV)
    self.GenerationDateStr = lHandler.get_string_value_of("ProductionDate")
    self.AcquisitionStart = lHandler.get_string_value_of("AcquisitionDate")
    self.OrbitNumber = lHandler.get_string_value_of("OrbitNumber")
    self.ReferenceSiteDefinitionId = "UNKNOWN"
    self.HeaderFilename = product_filename
    self.HeaderHandler = lHandler
    # Optional header fields: only read them when the node exists (check=True
    # makes get_only_value return None instead of raising).
    if xml_tools.get_only_value(lHandler.root, "//Product_Characteristics/INSTRUMENT", check=True) is not None:
        self.Instrument = lHandler.get_string_value_of("Instrument")
    if xml_tools.get_only_value(
            lHandler.root, "//Product_Characteristics/SPECTRAL_CONTENT", check=True) is not None:
        self.SpectralContent = lHandler.get_string_value_of("SpectralContent").replace("+", "")
    # VENUS specification
    # Store the VIE and SOL filenames (DATA and Headers) to copy in the L2 product
    self.SOLHeaderFileName = ""
    self.SOLImageFileName = ""
    self.VIEHeaderFileName = ""
    self.VIEImageFileName = ""
    # Initialize the parameters necessary for the core of the algorithms of MACCS
    # Get longitude and latitude coordinates of the product
    ulc = lHandler.get_upper_left_corner()
    lrc = lHandler.get_lower_right_corner()
    # Estimation of the coordinate of the central point
    center = lHandler.get_center()
    #l_Corner.Longitude = ulc[0] + (lrc[0] - ulc[0]) / 2.
    #l_Corner.Latitude = ulc[1] + (lrc[1] - ulc[1]) / 2.
    self.CenterCorner.longitude = center[0]
    self.CenterCorner.latitude = center[1]
    self.CenterCorner.column = 0
    self.CenterCorner.line = 0
    # Initialize the Validity Start/Stop
    l_UTCValidity = date_utils.get_utc_from_datetime(self.ProductDate)
    # Strip a trailing '.000Z' millisecond suffix so validity dates have a
    # plain 'YYYY-MM-DDThh:mm:ss' form.
    if l_UTCValidity.endswith('.000Z'):
        l_UTCValidity = l_UTCValidity[:-5]
    self.UTCValidityStart = l_UTCValidity
    self.UTCValidityStop = l_UTCValidity
    # Get the list of bands in a L2 product
    l_BandsDefinitions = plugin_base.BandsDefinitions
    l_ListOfBandsL2Coarse = l_BandsDefinitions.get_list_of_band_id_in_l2_coarse()
    l_NbBandsL2Coarse = len(l_ListOfBandsL2Coarse)
    LOGGER.debug(
        "l_BandsDefinitions->GetListOfBandCodeInL2Coarse -> l_NbBandsL2Coarse: " +
        str(l_NbBandsL2Coarse))
    LOGGER.debug(
        "MuscateXmllHandler->GetListOfBands -> l_NbBands : " +
        str(len(lHandler.get_list_of_bands())))
    self.ListOfViewingAnglesPerBandAtL2Resolution = []
    self.ListOfViewingAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution = []
    # Initialize the Viewing angles for each detectors (Zenith and Azimuth)
    # Read the constants values from the Header Envi file name
    # The angles must be in degree
    if plugin_base.WideFieldSensor:
        # Wide-field sensors provide one mean viewing angle per band; the
        # global ViewingAngle is the mean over all bands.
        # Initialize the Viewing angles for each detectors (Zenith and Azimuth)
        l_MeanViewingZenithalAngles = lHandler.get_mean_viewing_zenithal_angles()
        l_MeanViewingAzimuthalAngles = lHandler.get_mean_viewing_azimuthal_angles()
        self.ViewingAngle = {
            "incidence_zenith_angle": str(statistics.mean(l_MeanViewingZenithalAngles)),
            "incidence_azimuth_angle": str(statistics.mean(l_MeanViewingAzimuthalAngles))
        }
        # For each bands for EnviProduct
        for bd in range(0, len(l_ListOfBandsL2Coarse)):
            angles = {
                "incidence_zenith_angle": str(l_MeanViewingZenithalAngles[bd]),
                "incidence_azimuth_angle": str(l_MeanViewingAzimuthalAngles[bd])
            }
            self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(angles)
            self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_zenith_angle"]))
            self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_azimuth_angle"]))
    else:
        # Narrow-field sensors: a single mean (zenith, azimuth) pair is
        # replicated for every L2-coarse band.
        lViewingAngles = lHandler.get_mean_viewing_angles()
        l_ViewAngleZenith = lViewingAngles[0]
        l_ViewAngleAzimuth = lViewingAngles[1]
        self.ViewingAngle = {
            "incidence_zenith_angle": str(l_ViewAngleZenith),
            "incidence_azimuth_angle": str(l_ViewAngleAzimuth)
        }
        # For each bands for EnviProduct
        for bd in l_ListOfBandsL2Coarse:
            angles = {
                "incidence_zenith_angle": str(l_ViewAngleZenith),
                "incidence_azimuth_angle": str(l_ViewAngleAzimuth)
            }
            self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(angles)
            self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_zenith_angle"]))
            self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_azimuth_angle"]))
    # Fill the L2 resolution angles
    # Each L2-resolution band reuses the per-band entry computed above,
    # indexed by its position in the L2-coarse list.
    l_nbRes = len(l_BandsDefinitions.ListOfL2Resolution)
    for r in range(0, l_nbRes):
        l_res = l_BandsDefinitions.ListOfL2Resolution[r]
        l_l2bandcodes = l_BandsDefinitions.get_list_of_l2_band_code(l_res)
        l_l2bandidx = [
            l_BandsDefinitions.get_band_id_in_l2_coarse(b) for b in l_l2bandcodes
        ]
        for b in l_l2bandidx:
            self.ListOfViewingAnglesPerBandAtL2Resolution.append(
                self.ListOfViewingAnglesPerBandAtL2CoarseResolution[b])
    # Solar Angles
    lSolarAngles = lHandler.get_mean_solar_angles()
    self.SolarAngle = {
        "sun_zenith_angle": lSolarAngles[0],
        "sun_azimuth_angle": lSolarAngles[1]
    }
    # Solar angle grids are optional in the header.
    if xml_tools.get_only_value(lHandler.root, "//Angles_Grids_List/Sun_Angles_Grids", check=True) is not None:
        self.SolarAngleGrid["StepUnit"] = xml_tools.get_attribute(
            lHandler.root, "//Angles_Grids_List/Sun_Angles_Grids/Zenith", "step_unit")
        self.SolarAngleGrid["ColStep"] = lHandler.get_string_value_of("SunAngleColStep")
        self.SolarAngleGrid["RowStep"] = lHandler.get_string_value_of("SunAngleRowStep")
        self.SolarAngleGrid["Azimuth"] = lHandler.get_sun_azimuthal_angles()
        self.SolarAngleGrid["Zenith"] = lHandler.get_sun_zenithal_angles()
    # Viewing angle grids
    self.ViewingAngleGrids = []
    if len(
            xml_tools.get_all_values(
                lHandler.root,
                "//Angles_Grids_List/Viewing_Incidence_Angles_Grids_List/Band_Viewing_Incidence_Angles_Grids_List"
            )):
        if lHandler.has_per_band_angles():
            # One grid per (band, detector/zone) pair.
            for bn, bandid in l_BandsDefinitions.L2CoarseBandMap.items():
                zonelist = lHandler.get_list_of_zones(bn)
                zenith_values = lHandler.get_viewing_zenithal_angles(bn)
                azimuth_values = lHandler.get_viewing_azimuthal_angles(bn)
                LOGGER.debug("Viewing Angle grid for band " + str(bn))
                for d, det in enumerate(zonelist):
                    self.ViewingAngleGrids.append({
                        "StepUnit": lHandler.get_viewing_grid_step_unit(bn, det),
                        "ColStep": lHandler.get_viewing_grid_col_step(bn, det),
                        "RowStep": lHandler.get_viewing_grid_row_step(bn, det),
                        "Band": str(bandid),
                        "Detector": det.lstrip('0'),
                        "Azimuth": azimuth_values[d],
                        "Zenith": zenith_values[d]
                    })
        else:
            # No per-band angles: one grid per detector only.
            # NOTE(review): "dummy" band name and passing `det` to the
            # per-band angle getters rely on the handler ignoring the band
            # argument in this mode — confirm against MuscateXMLFileHandler.
            zonelist = lHandler.get_list_of_zones("dummy")
            for det in zonelist:
                LOGGER.debug("Viewing Angle grid for det " + det)
                self.ViewingAngleGrids.append({
                    "StepUnit": lHandler.get_viewing_grid_step_unit(bandid=None, det=det),
                    "ColStep": lHandler.get_viewing_grid_col_step(bandid=None, det=det),
                    "RowStep": lHandler.get_viewing_grid_row_step(bandid=None, det=det),
                    "Detector": det.lstrip('0'),
                    "Azimuth": lHandler.get_viewing_azimuthal_angles(det)[0],
                    "Zenith": lHandler.get_viewing_zenithal_angles(det)[0]
                })
    # Area by resolution
    self.AreaByResolution = []
    l_grpSuffixes = l_BandsDefinitions.ListOfL2Resolution
    # A single-resolution product uses the "XS" group id instead of the
    # resolution name.
    if len(l_grpSuffixes) == 1:
        l_grpSuffixes = ["XS"]
    for res in l_grpSuffixes:
        l_path_group_geo = "//Group_Geopositioning_List/Group_Geopositioning[@group_id='{}']/{}"
        curArea = Area()
        curArea.origin = (xml_tools.get_xml_string_value(
            lHandler.root, l_path_group_geo.format(res, "ULX")),
            xml_tools.get_xml_string_value(
                lHandler.root, l_path_group_geo.format(res, "ULY")))
        curArea.spacing = (xml_tools.get_xml_string_value(
            lHandler.root, l_path_group_geo.format(res, "XDIM")),
            xml_tools.get_xml_string_value(
                lHandler.root, l_path_group_geo.format(res, "YDIM")))
        curArea.size = (xml_tools.get_xml_string_value(
            lHandler.root, l_path_group_geo.format(res, "NCOLS")),
            xml_tools.get_xml_string_value(
                lHandler.root, l_path_group_geo.format(res, "NROWS")))
        self.AreaByResolution.append(curArea)
    # Spectral information
    self.SpectralInfo = []
    # XPath templates: l_pathBase selects the band node; the others are
    # relative sub-paths appended to it.
    l_pathBase = "//Spectral_Band_Informations_List/Spectral_Band_Informations[@band_id='{}']"
    l_pathNativeCoeff = "/Calibration_Coefficients_Lists/Native_Coefficients_List/COEFFICIENT[@name='{}']"
    l_pathRadiance = "/SOLAR_IRRADIANCE"
    l_pathWavelength = "/Wavelength/{}"
    l_pathResponse = "/Spectral_Response/{}"
    l_pathsSpecInfo = {
        'PhysicalGain': l_pathNativeCoeff.format("PhysicalGain"),
        'LuminanceMax': l_pathNativeCoeff.format("LuminanceMax"),
        'LuminanceMin': l_pathNativeCoeff.format("LuminanceMin"),
        'QuantizeCalMax': l_pathNativeCoeff.format("QuantizeCalMax"),
        'QuantizeCalMin': l_pathNativeCoeff.format("QuantizeCalMin"),
        'RadianceAdd': l_pathNativeCoeff.format("RadianceAdd"),
        'RadianceMult': l_pathNativeCoeff.format("RadianceMult"),
        'ReflectanceAdd': l_pathNativeCoeff.format("ReflectanceAdd"),
        'ReflectanceMult': l_pathNativeCoeff.format("ReflectanceMult"),
        'SolarIrradiance': l_pathRadiance,
        'WavelengthMin': l_pathWavelength.format("MIN"),
        'WavelengthMax': l_pathWavelength.format("MAX"),
        'WavelengthCentral': l_pathWavelength.format("CENTRAL"),
        'ResponseStep': l_pathResponse.format("STEP"),
        'ResponseValues': l_pathResponse.format("VALUES")
    }
    for b, bidx in l_BandsDefinitions.L1BandMap.items():
        specInfo = {"Band": b}
        realBase = l_pathBase.format(b)
        for measure, pathMeasure in l_pathsSpecInfo.items():
            # check=True: missing nodes yield an empty string instead of raising.
            res = xml_tools.get_xml_string_value(lHandler.root,
                                                 realBase + pathMeasure,
                                                 check=True)
            if len(res):
                specInfo[measure] = res
                # PhysicalGain is the only value stored as a float.
                if measure == 'PhysicalGain':
                    specInfo[measure] = float(res)
        self.SpectralInfo.append(specInfo)
    # -------------------------------------------------------------------------
    # 4.2: New
    # Set the L1 no data value
    self.L1NoData = int(lHandler.get_string_value_of("L1NoData"))
    # Set the reflectance quantification value
    self.ReflectanceQuantification = 1. / float(
        lHandler.get_string_value_of("QuantificationValue"))
    # Computes the real value of the L1 NoData
    self.RealL1NoData = float(self.L1NoData) * self.ReflectanceQuantification
    # Save metadata related to Muscate format
    self.MuscateData["Node_MetadataFormat"] = xml_tools.extract_nodes(
        lHandler.root, "//Metadata_Identification/METADATA_FORMAT")
    l_NodeOriginalDataDiffuser = xml_tools.extract_nodes(
        lHandler.root, "//ORIGINAL_DATA_DIFFUSER")
    if l_NodeOriginalDataDiffuser is not None:
        self.MuscateData["Node_OriginalDataDiffuser"] = l_NodeOriginalDataDiffuser
    self.MuscateData["Node_Geoposition_Informations"] = xml_tools.extract_nodes(
        lHandler.root, "//Geoposition_Informations")
    self.MuscateData["Node_Geometric_Informations"] = xml_tools.extract_nodes(
        lHandler.root, "//Geometric_Informations")
    #~ self.MuscateData["Identifier"] = lIdent
    self.MuscateData["Authority"] = lHandler.get_string_value_of("Authority")
    self.MuscateData["Producer"] = lHandler.get_string_value_of("Producer")
    self.MuscateData["Project"] = lHandler.get_string_value_of("Project")
    self.MuscateData["ZoneGeo"] = lHandler.get_string_value_of("ZoneGeo")
    #~ self.MuscateData["Platform"] = self.Satellite
    self.MuscateData["AcquisitionDate"] = lHandler.get_string_value_of(
        "AcquisitionDate")
    self.MuscateData["UTCAcquisitionRangeMean"] = lHandler.get_string_value_of(
        "UTCAcquisitionRangeMean")
    self.MuscateData["UTCAcquisitionRangeDatePrecision"] = lHandler.get_string_value_of(
        "UTCAcquisitionRangeDatePrecision")
    # Optional metadata nodes: each is copied only when present in the header.
    l_NodeSolarAnglesGrid = xml_tools.extract_nodes(
        lHandler.root,
        "//Data_List/Data[Data_Properties/NATURE='Solar_Angles_Grid']")
    if l_NodeSolarAnglesGrid is not None:
        self.MuscateData["Node_Solar_Angles_Grid"] = l_NodeSolarAnglesGrid
    l_NodeViewingAnglesGrid = xml_tools.extract_nodes(
        lHandler.root,
        "//Data_List/Data[Data_Properties/NATURE='Viewing_Angles_Grid']")
    if l_NodeViewingAnglesGrid is not None:
        self.MuscateData["Node_Viewing_Angles_Grid"] = l_NodeViewingAnglesGrid
    l_NodeUsefulImageInfoFile = xml_tools.extract_nodes(
        lHandler.root,
        "//Data_List/Data[Data_Properties/NATURE='Useful_Image_Informations_File']"
    )
    if l_NodeUsefulImageInfoFile is not None:
        self.MuscateData["Node_Useful_Image_Informations_File"] = l_NodeUsefulImageInfoFile
    l_NodeUsefulImage = xml_tools.extract_nodes(
        lHandler.root,
        "//Mask_List/Mask[Mask_Properties/NATURE='Useful_Image']")
    if l_NodeUsefulImage is not None:
        self.MuscateData["Node_Useful_Image"] = l_NodeUsefulImage
    l_NodeDetFoo = xml_tools.get_only_value(
        lHandler.root,
        "//Mask_List/Mask/Mask_Properties/NATURE[.='Detector_Footprint']",
        check=True)
    if l_NodeDetFoo is not None:
        self.MuscateData["ZoneMaskFileNames"] = lHandler.get_map_list_of_detector_footprint_image_filenames()
    pix_node = xml_tools.get_only_value(
        lHandler.root,
        "//Mask_List/Mask/Mask_Properties/NATURE[.='Aberrant_Pixels']",
        check=True)
    if pix_node is not None:
        self.MuscateData["PIXImages"] = lHandler.get_list_of_pix_mask_filenames()
        self.MuscateData["PIXIndices"] = lHandler.get_list_of_pix_mask_indices()
    spectral_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Radiometric_Informations/Spectral_Band_Informations_List")
    if spectral_node is not None:
        self.MuscateData["Node_Spectral_Band_Informations_List"] = spectral_node
    qualityGeo_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Current_Product/Product_Quality_List[@level='Geo']")
    if qualityGeo_node is not None:
        self.MuscateData["Node_Product_Quality_List_Geo"] = qualityGeo_node
    qualityNatif_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Current_Product/Product_Quality_List[@level='Natif']")
    if qualityNatif_node is not None:
        self.MuscateData["Node_Product_Quality_List_Natif"] = qualityNatif_node
    processingJob_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Production_Informations/Processing_Jobs_List")
    if processingJob_node is not None:
        self.MuscateData["Node_Processing_Jobs_List"] = processingJob_node
    return True