def initialize(self, filename, working_dir, has_snow):
    """Initialize the DEM filename provider from an AUX_REFDE2 DTM product.

    Uncompresses the product's .DBL archive, inventories the packaged TIF
    files (per-resolution ALT/SLP/ASP plus ALC/MSK/ASC/SLC), rescales the
    SLC/SLP/ASP rasters by ``self._coeff`` into *working_dir*, and reads the
    projection and site metadata from the .HDR file. Finally computes basic
    altitude statistics on the first-resolution ALT image.

    :param filename: path to the AUX_REFDE2 product; the .HDR / .DBL /
        .DBL.DIR sibling paths are derived from its stem
    :param working_dir: directory receiving the rescaled "Mul_*" images
    :param has_snow: unused in this method
        # NOTE(review): kept for interface compatibility — confirm callers
    :raises MajaDataException: if a packaged file is not a .TIF, if any
        required DTM file (ALC/MSK/SLC/SLP/ALT/ASP) is missing, or if the
        DEM projection type is neither cartographic nor geographic
    """
    file_hdr = os.path.splitext(filename)[0] + ".HDR"
    file_dbl = os.path.splitext(filename)[0] + ".DBL"
    file_dbldir = os.path.splitext(filename)[0] + ".DBL.DIR"
    LOGGER.info("AUX_REFDE2 filename: " + filename)
    # uncompress dbl
    uncompress_dbl_product(file_dbl)
    # Count the number of resolutions: one "_ALT*.TIF" file per resolution.
    list_of_file = os.listdir(file_dbldir)
    nbresol = 0
    for f in list_of_file:
        if "_ALT" in f and "TIF" in os.path.splitext(f)[1]:
            nbresol = nbresol + 1
    LOGGER.info("Nb resolution found " + str(nbresol))
    self.initialize_res_list(nbresol)
    LOGGER.info(
        "DEMFilenamesProvider::Initialize. Nb resolution computed:" + str(len(self._resList)))
    for resol in self._resList:
        LOGGER.debug("DEMFilenamesProvider::Initialize. Prefix resol : " + resol)
    handler = EarthExplorerXMLFileHandler(file_hdr)
    list_of_dbl_files = handler.get_list_of_packaged_dbl_files(True, False)
    LOGGER.info("DEMFileNames found " + str(len(list_of_dbl_files)) + " files")
    # Every packaged file must end with .TIF (split leaves an empty tail).
    for dbl_file in list_of_dbl_files:
        if dbl_file.split('.TIF')[-1]:
            raise MajaDataException(
                "Wrong file extension detected. Delete the file: " + str(dbl_file))
    # --------------------------------------
    # Find the correct filename
    for fi in list_of_dbl_files:
        # LAIG - FA - MAC - 1610 - CNES
        # e.g. ..._0001_SLP.TIF      -> keytype "SLP"
        # e.g. ..._0001_ALT_R1.TIF   -> keytype "ALT_R1"
        l_splitted = (os.path.splitext(os.path.basename(fi))[0]).split("_")
        l_lenghtlistfilenamename = len(l_splitted)
        # Extract the two last values -> ex: 0001_SLP or ALT_R1
        l_keytype = l_splitted[-1]
        if l_lenghtlistfilenamename > 2:
            l_keytype = l_splitted[-2] + "_" + l_keytype
        # Dispatch the file on its keytype; per-resolution products
        # (SLP/ALT/ASP) are matched against each resolution suffix.
        if "ALC" in l_keytype:
            self.ALC = fi
        elif "MSK" in l_keytype:
            self.MSK = fi
        elif "ASC" in l_keytype:
            self.ASC = fi
        elif "SLC" in l_keytype:
            self.__SLCInternal = fi
        else:
            # Loop under resolutions
            for res in self._resList:
                if "SLP" in l_keytype:
                    if res in l_keytype:
                        self.__SLPListInternal.append(fi)
                elif "ALT" in l_keytype:
                    if res in l_keytype:
                        self.ALTList.append(fi)
                elif "ASP" in l_keytype:
                    if res in l_keytype:
                        self.__ASPListInternal.append(fi)
                else:
                    LOGGER.debug(
                        "Unknown Filename and associated product type.")
            # endloop resol
    # --------------------------------------
    # Check existence of ALC filename
    if not os.path.exists(self.ALC):
        raise MajaDataException("The ALC file '" + self.ALC + "' of the DTM doesn't exist !")
    # --------------------------------------
    # Check existence of MSK filename
    if not os.path.exists(self.MSK):
        raise MajaDataException("The MSK file '" + self.MSK + "' of the DTM doesn't exist !")
    # --------------------------------------
    # Check existence of SLC filename, then rescale it by self._coeff
    if not os.path.exists(self.__SLCInternal):
        raise MajaDataException("The SLC file '" + self.__SLCInternal + "' of the DTM doesn't exist !")
    else:
        LOGGER.debug("Starting multiply " + self.__SLCInternal + " * " + str(self._coeff))
        self.SLC = os.path.join(
            working_dir, "Mul_" + os.path.basename(self.__SLCInternal))
        self._apps.add_otb_app(
            multiply_by_scalar(self.__SLCInternal, self._coeff, output_image=self.SLC))
        # The SLC geometry defines the coarse-resolution area.
        mtdat = GdalDatasetInfo(self.__SLCInternal)
        self.CoarseArea = Area()
        self.CoarseArea.size = mtdat.size
        self.CoarseArea.origin = mtdat.origin
        self.CoarseArea.spacing = mtdat.pixel_size
        LOGGER.debug("Done")
    # --------------------------------------
    for resol in range(0, len(self._resList)):
        # --------------------------------------
        # Check existence of SLP filename, then rescale it by self._coeff
        if not os.path.exists(self.__SLPListInternal[resol]):
            raise MajaDataException("One of the SLP file '" + self.__SLPListInternal[resol] +
                                    "' of the DTM doesn't exist !")
        else:
            LOGGER.debug("Starting multiply " + self.__SLPListInternal[resol] + " * " + str(self._coeff))
            tmp = os.path.join(
                working_dir, "Mul_" + os.path.basename(self.__SLPListInternal[resol]))
            slp_mul_app = multiply_by_scalar(self.__SLPListInternal[resol],
                                             self._coeff, output_image=tmp, write_output=False)
            self._apps.add_otb_app(slp_mul_app)
            # Each SLP image carries the L2 area geometry for its resolution.
            mtdat = GdalDatasetInfo(self.__SLPListInternal[resol])
            l2area = Area()
            l2area.size = mtdat.size
            l2area.origin = mtdat.origin
            l2area.spacing = mtdat.pixel_size
            self.ProjRef = mtdat.dataset.GetProjectionRef()
            self.L2Areas.append(l2area)
            LOGGER.debug("Done")
            self.SLPList.append(slp_mul_app.getoutput().get("out"))
        # --------------------------------------
        # Check existence of ALT filename
        if not os.path.exists(self.ALTList[resol]):
            raise MajaDataException("One of the ALT file '" + self.ALTList[resol] +
                                    "' of the DTM doesn't exist !")
        # --------------------------------------
        # Check existence of ASP filename, then rescale it by self._coeff
        if not os.path.exists(self.__ASPListInternal[resol]):
            raise MajaDataException("One of the ASP file '" + self.__ASPListInternal[resol] +
                                    "' of the DTM doesn't exist !")
        else:
            LOGGER.debug("Starting multiply " + self.__ASPListInternal[resol] + " * " + str(self._coeff))
            tmp = os.path.join(
                working_dir, "Mul_" + os.path.basename(self.__ASPListInternal[resol]))
            asp_mul_app = multiply_by_scalar(self.__ASPListInternal[resol],
                                             self._coeff, output_image=tmp, write_output=False)
            self._apps.add_otb_app(asp_mul_app)
            LOGGER.debug("Done")
            self.ASPList.append(asp_mul_app.getoutput().get("out"))
    # end loop resol
    LOGGER.debug(nbresol)
    # Read the projection description from the HDR: cartographic code wins
    # over geographic; neither present is a fatal error.
    l_cartoCode = xml_tools.get_only_value(
        handler.root,
        "//DEM_Information/Cartographic/Coordinate_Reference_System/Code",
        namespaces=handler.nss, check=True)
    l_geoCode = xml_tools.get_only_value(
        handler.root,
        "//DEM_Information/Geographic/Coordinate_Reference_System/Code",
        namespaces=handler.nss, check=True)
    if l_cartoCode is not None:
        self.ProjCode = l_cartoCode.text
        self.ProjType = "PROJECTED"
    elif l_geoCode is not None:
        self.ProjCode = l_geoCode.text
        self.ProjType = "GEOGRAPHIC"
    else:
        raise MajaDataException("Unknown DEM type")
    LOGGER.debug("DEM Projection Code: " + self.ProjCode)
    LOGGER.debug("DEM Projection Type: " + self.ProjType)
    self.Site = xml_tools.get_xml_string_value(
        handler.root,
        "//Specific_Product_Header/Instance_Id/Applicable_Site_Nick_Name",
        namespaces=handler.nss)
    if nbresol != 0:
        # Altitude statistics on the first-resolution ALT image.
        stat_app = stats(self.ALTList[0])
        self.ALT_Mean = stat_app.getoutput().get("mean")
        self.ALT_Max = stat_app.getoutput().get("max")
        self.ALT_Min = stat_app.getoutput().get("min")
        self.ALT_Stdv = stat_app.getoutput().get("stdv")
        self.ALT_LogicalName = "LOCAL=" + os.path.splitext(
            os.path.basename(file_hdr))[0]
        LOGGER.info("DEM Mean : " + str(self.ALT_Mean))
        LOGGER.info("DEM Max : " + str(self.ALT_Max))
        LOGGER.info("DEM Min : " + str(self.ALT_Min))
        LOGGER.info("DEM Stdv : " + str(self.ALT_Stdv))
def initialize(self, product_filename, validate=False, schema_path=None, tile_id=None):
    """Probe and load a Sentinel-2 L1C product from its main metadata XML.

    Returns False (without raising) when *product_filename* does not look
    like an L1C user-product XML or cannot be parsed, so the caller can try
    another plugin. On success, fills all product/tile metadata attributes
    (dates, angles, areas per resolution, spectral information, center
    corner) and returns True.

    :param product_filename: path to the MTD_MSIL1C / _MTD_SAFL1C_ XML file
    :param validate: if True, the reader validates the XML
    :param schema_path: schema location forwarded to the reader
    :param tile_id: optional tile identifier forwarded to the reader
        # NOTE(review): presumably selects a granule in multi-tile
        # products — confirm against MajaSentinel2L1MainXmlReader
    :return: True on success, False if the product is not handled
    """
    self.ProductFileName = product_filename
    self.PluginName = "SENTINEL2"
    LOGGER.info("Start Sentinel2 L1 ImageInformationProvider " + product_filename + "...")
    # Quick rejection: the filename must carry an L1C marker and be an XML file.
    if (("_MTD_SAFL1C_" not in product_filename and "MTD_MSIL1C" not in product_filename)
            or os.path.splitext(product_filename)[1] != ".xml"):
        return False
    # Any parse/validation failure means the product is not ours: log and bail.
    try:
        headerHandler = MajaSentinel2L1MainXmlReader(product_filename, validate,
                                                     schema_path, tile_id=tile_id)
    except Exception as e:
        LOGGER.info(e)
        return False
    self.HeaderHandler = headerHandler
    self.Satellite = headerHandler.satellite_name.upper()
    # Satellite id without the dash, e.g. "SENTINEL-2A" -> "SENTINEL2A".
    self.SatelliteID = self.Satellite.upper().replace("-", "")
    self.UniqueSatellite = "SENTINEL-2_"
    self.LevelType = "L1VALD"
    self.FileCategory = "SSC"
    # Granule id layout: <prefix>_<file class>_...
    granule_id_split = headerHandler.granule_id.split("_")
    self.FileClass = granule_id_split[1]
    self.HeaderFilename = headerHandler.main_xml_file
    self.Prefix = granule_id_split[0]
    # Reflectance quantification: DN * (1/quantification_value) -> reflectance.
    self.L1NoData = headerHandler.get_no_data()
    self.ReflectanceQuantification = 1.0 / headerHandler.quantification_value
    self.RealL1NoData = self.L1NoData * self.ReflectanceQuantification
    # Load the granule (tile) level metadata.
    self.xmlTileFilename = headerHandler.XmlTileFileName
    LOGGER.info("Tile xml filename : " + self.xmlTileFilename)
    tile_handler = MajaSentinel2L1GranuleXmlReader(self.xmlTileFilename)
    self.TileHandler = tile_handler
    LOGGER.debug("TileId: " + self.HeaderHandler.TileId)
    self.Site = maja_utils.get_formated_site(self.HeaderHandler.TileId)
    self.TileHandler.TileId = self.Site
    # Product date comes from the tile sensing time.
    self.ProductDate = date_utils.get_datetime_from_utc("UTC=" + tile_handler.sensing_time)
    self.ProductDateStr = self.ProductDate.strftime('%Y%m%d')
    LOGGER.debug("Product Date: " + self.ProductDateStr)
    self.ProductId = headerHandler.get_string_value_of("ProductURI")
    LOGGER.debug("ProductID: " + self.ProductId)
    self.GenerationDateStr = headerHandler.get_string_value_of("GenerationTime")
    self.AcquisitionStart = headerHandler.get_string_value_of("ProductStartTime")
    self.OrbitNumber = headerHandler.get_string_value_of("OrbitNumber")
    # Validity start/stop extracted from the datastrip id fields
    # (third-from-last = start date, second-from-last minus its first char = stop date).
    l_datastrip_split = tile_handler.datastrip_id.split("_")
    l_start_date_str = l_datastrip_split[-3]
    l_stop_date_str = l_datastrip_split[-2][1:]
    l_start_date_time = date_utils.get_datetime_from_yyyymmddthhmmss(l_start_date_str)
    l_stop_date_time = date_utils.get_datetime_from_yyyymmddthhmmss(l_stop_date_str)
    self.UTCValidityStart = date_utils.get_utc_from_datetime(l_start_date_time)
    LOGGER.debug("UTCValidityStart : " + self.UTCValidityStart)
    self.UTCValidityStop = date_utils.get_utc_from_datetime(l_stop_date_time)
    LOGGER.debug("UTCValidityStop : " + self.UTCValidityStop)
    self.ReferenceSiteDefinitionId = "UNKNOWN"
    # SOL/VIE auxiliary images are not available in S2 products.
    self.SOLHeaderFileName = ""
    self.SOLImageFileName = ""
    self.VIEHeaderFileName = ""
    self.VIEImageFileName = ""
    # Per-band viewing angles at L2 coarse resolution, plus their means.
    self.ListOfViewingAnglesPerBandAtL2Resolution = []
    self.ListOfViewingAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution = []
    l_MeanViewingZenithalAngles = []
    l_MeanViewingAzimuthalAngles = []
    for f in range(0, len(tile_handler.angles.viewing_incidence_angle.incidence_angles_mean)):
        # Mean incidence angles are keyed by band index as a string.
        angles = tile_handler.angles.viewing_incidence_angle.incidence_angles_mean[str(f)]
        self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(angles)
        self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(angles["incidence_zenith_angle"])
        self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(angles["incidence_azimuth_angle"])
        l_MeanViewingZenithalAngles.append(float(angles["incidence_zenith_angle"]))
        l_MeanViewingAzimuthalAngles.append(float(angles["incidence_azimuth_angle"]))
    # Global viewing angle = mean over all bands (stored as strings).
    self.ViewingAngle = {
        "incidence_zenith_angle": str(statistics.mean(l_MeanViewingZenithalAngles)),
        "incidence_azimuth_angle": str(statistics.mean(l_MeanViewingAzimuthalAngles))
    }
    # Replicate the coarse-resolution angles onto the L2 band ordering.
    l_bandsdefinition = MajaSentinel2Plugin().BandsDefinitions
    l_nbRes = len(l_bandsdefinition.ListOfL2Resolution)
    for r in range(0, l_nbRes):
        l_res = l_bandsdefinition.ListOfL2Resolution[r]
        l_l2bandcodes = l_bandsdefinition.get_list_of_l2_band_code(l_res)
        l_l2bandidx = [l_bandsdefinition.get_band_id_in_l2_coarse(b) for b in l_l2bandcodes]
        for b in l_l2bandidx:
            self.ListOfViewingAnglesPerBandAtL2Resolution.append(
                self.ListOfViewingAnglesPerBandAtL2CoarseResolution[b])
    # One viewing-angle grid per (band, detector) pair from the tile XML.
    self.ViewingAngleGrids = []
    for grid in tile_handler.angles.viewing_incidence_angle.list_of_viewing_angles_grid:
        self.ViewingAngleGrids.append({
            "StepUnit": grid.step_unit,
            "ColStep": grid.col_step,
            "RowStep": grid.row_step,
            "Band": grid.band_id,
            "Detector": grid.detector_id,
            "Azimuth": grid.azimuth_values,
            "Zenith": grid.zenith_values
        })
    # Solar mean angle and solar angle grid.
    self.SolarAngle = tile_handler.angles.sun_angles.sun_angles_mean
    self.SolarAngleGrid["StepUnit"] = tile_handler.angles.sun_angles.step_unit
    self.SolarAngleGrid["ColStep"] = tile_handler.angles.sun_angles.col_step
    self.SolarAngleGrid["RowStep"] = tile_handler.angles.sun_angles.row_step
    self.SolarAngleGrid["Azimuth"] = tile_handler.angles.sun_angles.azimuth_angle_list_string_values
    self.SolarAngleGrid["Zenith"] = tile_handler.angles.sun_angles.zenith_angle_list_string_values
    # Area (origin/spacing/size) by L2 resolution, read from tile geoposition.
    self.AreaByResolution = []
    for res in l_bandsdefinition.ListOfL2Resolution:
        res_m = l_bandsdefinition.get_l1_resolution(res)
        l_ul = tile_handler.get_geoposition_upperleftcorner(res_m)
        l_spa = tile_handler.get_geoposition_dims(res_m)
        l_size = tile_handler.get_size(res_m)
        curArea = Area()
        curArea.origin = (
            str(l_ul.x),
            str(l_ul.y))
        curArea.spacing = (
            str(l_spa.x),
            str(l_spa.y))
        curArea.size = (
            str(l_size.x),
            str(l_size.y))
        self.AreaByResolution.append(curArea)
    # Spectral information per L1 band (gain, irradiance, wavelengths, response).
    self.SpectralInfo = []
    l_pathGain = "//Product_Image_Characteristics/PHYSICAL_GAINS[@bandId='{}']"
    l_pathRadiance = "//Solar_Irradiance_List/SOLAR_IRRADIANCE[@bandId='{}']"
    l_pathWavelength = "//Spectral_Information_List/Spectral_Information[@bandId='{}']/Wavelength/{}"
    l_pathResponse = "//Spectral_Information_List/Spectral_Information[@bandId='{}']/Spectral_Response/{}"
    for b, bidx in l_bandsdefinition.L1BandMap.items():
        self.SpectralInfo.append({
            "Band": b,
            "PhysicalGain": float(headerHandler.get_string_value(l_pathGain.format(bidx))),
            "SolarIrradiance": headerHandler.get_string_value(l_pathRadiance.format(bidx)),
            "WavelengthMin": headerHandler.get_string_value(l_pathWavelength.format(bidx, "MIN")),
            "WavelengthMax": headerHandler.get_string_value(l_pathWavelength.format(bidx, "MAX")),
            "WavelengthCentral": headerHandler.get_string_value(l_pathWavelength.format(bidx, "CENTRAL")),
            "ResponseStep": headerHandler.get_string_value(l_pathResponse.format(bidx, "STEP")),
            "ResponseValues": headerHandler.get_string_value(l_pathResponse.format(bidx, "VALUES"))
        })
    # TODO CenterCorner
    # Compute the product center in lat/long from the 10m bounding box.
    l_resolution = 10
    l_boundingbox = tile_handler.get_geoposition_boundingbox(l_resolution)
    LOGGER.info(
        "Geoposition BoundingBox computed: xmin, ymin, xmax, ymax: " +
        str(l_boundingbox.xmin) + ", " + str(l_boundingbox.ymin) + ", " +
        str(l_boundingbox.xmax) + ", " + str(l_boundingbox.ymax) + ".")
    # Transform in Lat / Long coordinates
    # -------------------------------------------------------------------------
    # Keep only the numeric part of the CS code, e.g. "EPSG:32615" -> "32615".
    cs_code = tile_handler.reference_system_code
    cs_code = maja_utils.split_string(cs_code, ':')[-1]
    # Estimation of the coordinate of the central point
    l_inputpoint_0 = l_boundingbox.xmin + (l_boundingbox.xmax - l_boundingbox.xmin) / 2.0
    l_inputpoint_1 = l_boundingbox.ymin + (l_boundingbox.ymax - l_boundingbox.ymin) / 2.0
    LOGGER.debug(
        "Geoposition BoundingBox central point computed: x, y: " +
        str(l_inputpoint_0) + ", " + str(l_inputpoint_1))
    LOGGER.debug("Start Conversion ToWKT with the EPSG '" + cs_code + "'...")
    # Delegate the carto -> geographic conversion to the OTB application.
    param_conv = {"carto.x": l_inputpoint_0,
                  "carto.y": l_inputpoint_1,
                  "mapproj": "epsg",
                  "mapproj.epsg.code": int(cs_code)
                  }
    conv_app = OtbAppHandler("ConvertCartoToGeoPoint", param_conv)
    LOGGER.debug("Start Conversion ToWKT done.")
    self.CenterCorner.longitude = conv_app.getoutput()["long"]
    self.CenterCorner.latitude = conv_app.getoutput()["lat"]
    self.CenterCorner.column = 0
    self.CenterCorner.line = 0
    # Release the OTB application handle.
    conv_app = None
    return True
def initialize(self, product_filename, validate=False, schema_path=None):
    """Probe and load a Venus L1 product from its EarthExplorer header.

    Returns False (without raising) when the sibling .HDR file cannot be
    loaded or when the declared mission is not handled by the VENUS plugin,
    so the caller can try another plugin. On success, fills all product
    metadata attributes (ids, dates, angles, grids, areas, spectral
    information, no-data handling) and returns True.

    :param product_filename: path to the L1 product; the .HDR sibling path
        is derived from its stem
    :param validate: forwarded to the filenames provider
    :param schema_path: forwarded to the filenames provider
    :return: True on success, False if the product is not handled
    """
    LOGGER.info("Start Venus L1 Initialize on product " + product_filename)
    l_hdrfilename = os.path.splitext(product_filename)[0] + ".HDR"
    l_CanLoad = xml_tools.can_load_file(l_hdrfilename)
    if not l_CanLoad:
        return False
    rootNode = xml_tools.get_root_xml(l_hdrfilename, deannotate=True)
    self.Satellite = xml_tools.get_xml_string_value(rootNode, "//Mission")
    if not self._plugin.is_valid_with_satellite(self.Satellite):
        LOGGER.debug(
            "The L1 product '" + product_filename + "' with satellite '" +
            self.Satellite + "' is not a VENUS product !")
        # BUGFIX: previously fell through and initialized a non-VENUS
        # product anyway; reject it like the other plugin initializers do.
        return False
    self.SatelliteID = self.Satellite.upper()
    self.PluginName = self._plugin.PluginName
    self.Prefix = "VE"
    # File type layout: <category>_<level>...
    l_File_Type = xml_tools.get_xml_string_value(rootNode, "//File_Type")
    filenamekey = l_File_Type.split("_")
    self.FileCategory = filenamekey[0]
    self.LevelType = filenamekey[1]
    self.FileClass = xml_tools.get_xml_string_value(rootNode, "//File_Class")
    self.Site = xml_tools.get_xml_string_value(rootNode, "//Instance_Id/Nick_Name")
    self.ReferenceSiteDefinitionId = xml_tools.get_xml_string_value(
        rootNode, "//Reference_SiteDefinition_Id")
    l_AcquisitionDateTime = xml_tools.get_xml_string_value(
        rootNode, "//Product_Information/Acquisition_Date_Time")
    self.ProductDate = date_utils.get_datetime_from_utc(l_AcquisitionDateTime)
    self.ProductDateStr = self.ProductDate.strftime('%Y%m%d')
    LOGGER.debug("Product Date: " + self.ProductDateStr)
    self.ProductId = xml_tools.get_xml_string_value(rootNode, "//Fixed_Header/File_Name")
    # Generation date: strip the leading "UTC=" marker and force a 'Z' suffix.
    genDate = xml_tools.get_xml_string_value(rootNode, "//Processing_Information/Date_Time")
    genDate = genDate[4:]
    if genDate[-1] != 'Z':
        genDate = genDate + 'Z'
    self.GenerationDateStr = genDate
    self.AcquisitionStart = l_AcquisitionDateTime[4:]
    self.OrbitNumber = xml_tools.get_xml_string_value(
        rootNode, "//Product_Information/Acquisition_Orbit_Number")
    self.SpectralContent = "XS"
    # Resolve the image/aux filenames of the product.
    self.FilenamesProvider.initialize(l_hdrfilename, validate=validate,
                                      schema_path=schema_path)
    self.HeaderFilename = self.FilenamesProvider.m_hdrfilename
    self.SOLImageFileName = self.FilenamesProvider.m_SOLImageFileName
    self.SOLHeaderFileName = self.FilenamesProvider.m_SOLHeaderFileName
    self.VIEImageFileName = self.FilenamesProvider.m_VIEImageFileName
    self.VIEHeaderFileName = self.FilenamesProvider.m_VIEHeaderFileName
    self.HeaderHandler = VenusL1HeaderImageEarthExplorerXMLFileHandler(l_hdrfilename)
    # Estimation of the coordinate of the central point
    self.CenterCorner.longitude = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_long()
    self.CenterCorner.latitude = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_lat()
    self.CenterCorner.column = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_column()
    self.CenterCorner.line = self.HeaderHandler.get_useful_image_geo_coverage_center_corner_line()
    # Initialize the Validity Start/Stop
    self.UTCValidityStart = l_AcquisitionDateTime
    self.UTCValidityStop = l_AcquisitionDateTime
    # Initialize the viewing angles for each detector (zenith and azimuth),
    # accumulating a running sum for the global mean.
    self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution = []
    l_meanViewingZenith = 0.0
    l_meanViewingAzimuth = 0.0
    l_count = 0.0
    l_BandsDefinitions = self._plugin.BandsDefinitions
    for det in l_BandsDefinitions.DetectorMap:
        l_Zenith = self.HeaderHandler.get_useful_image_center_view_angle_zenith(det)
        l_Azimuth = self.HeaderHandler.get_useful_image_center_view_angle_azimuth(det)
        l_meanViewingZenith += l_Zenith
        l_meanViewingAzimuth += l_Azimuth
        l_count += 1.0
        self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(str(l_Zenith))
        self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(str(l_Azimuth))
        # NOTE(review): this list is appended to without being reset here —
        # presumably cleared by the constructor; confirm before reuse.
        self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(
            {
                "incidence_zenith_angle": str(l_Zenith),
                "incidence_azimuth_angle": str(l_Azimuth)
            })
    self.ViewingAngle = {
        "incidence_zenith_angle": str(l_meanViewingZenith / l_count),
        "incidence_azimuth_angle": str(l_meanViewingAzimuth / l_count)
    }
    # Fill the L2 resolution angles
    self.ListOfViewingAnglesPerBandAtL2Resolution = self.ListOfViewingAnglesPerBandAtL2CoarseResolution
    # Solar Angles
    self.SolarAngle = {
        "sun_zenith_angle": self.HeaderHandler.get_useful_image_image_center_solar_angle_zenith(),
        "sun_azimuth_angle": self.HeaderHandler.get_useful_image_image_center_solar_angle_azimuth()
    }
    # Detect pixel size and product size
    originX = xml_tools.get_xml_float_value(
        rootNode, "//Geo_Referencing_Information/Product_Coverage/Cartographic/Upper_Left_Corner/X")
    originY = xml_tools.get_xml_float_value(
        rootNode, "//Geo_Referencing_Information/Product_Coverage/Cartographic/Upper_Left_Corner/Y")
    pixSizeX = xml_tools.get_xml_float_value(rootNode, "//Product_Sampling/By_Column")
    pixSizeY = xml_tools.get_xml_float_value(rootNode, "//Product_Sampling/By_Line")
    nbCol = xml_tools.get_xml_int_value(rootNode, "//Image_Information/Size/Columns")
    nbRow = xml_tools.get_xml_int_value(rootNode, "//Image_Information/Size/Lines")
    # The angle grids span the whole product: one step from corner to corner.
    gridColStep = (nbCol - 1.0) * pixSizeX
    gridRowStep = (nbRow - 1.0) * pixSizeY
    gridColStepStr = f"{gridColStep:.1f}"
    gridRowStepStr = f"{gridRowStep:.1f}"
    # Solar angle grid: the four product corner values, laid out as a 2x2 grid.
    ula = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Left_Corner/Azimuth")
    ulz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Left_Corner/Zenith")
    ura = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Right_Corner/Azimuth")
    urz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Upper_Right_Corner/Zenith")
    lla = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Left_Corner/Azimuth")
    llz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Left_Corner/Zenith")
    lra = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Right_Corner/Azimuth")
    lrz = xml_tools.get_xml_string_value(rootNode, "//Solar_Angles/Product/Lower_Right_Corner/Zenith")
    self.SolarAngleGrid["StepUnit"] = "m"
    self.SolarAngleGrid["ColStep"] = gridColStepStr
    self.SolarAngleGrid["RowStep"] = gridRowStepStr
    self.SolarAngleGrid["Azimuth"] = [ula + ' ' + ura, lla + ' ' + lra]
    self.SolarAngleGrid["Zenith"] = [ulz + ' ' + urz, llz + ' ' + lrz]
    # Viewing angle grids: same 2x2 corner layout, one grid per detector.
    detectors = [1, 2, 3, 4]
    self.ViewingAngleGrids = []
    l_pathView = "//List_of_Viewing_Angles/Viewing_Angles[@sn='{}']/Product/{}"
    for det in detectors:
        ula = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Upper_Left_Corner/Azimuth"))
        ulz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Upper_Left_Corner/Zenith"))
        ura = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Upper_Right_Corner/Azimuth"))
        urz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Upper_Right_Corner/Zenith"))
        lla = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Lower_Left_Corner/Azimuth"))
        llz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Lower_Left_Corner/Zenith"))
        lra = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Lower_Right_Corner/Azimuth"))
        lrz = xml_tools.get_xml_string_value(rootNode, l_pathView.format(det, "Lower_Right_Corner/Zenith"))
        self.ViewingAngleGrids.append({
            "StepUnit": "m",
            "ColStep": gridColStepStr,
            "RowStep": gridRowStepStr,
            "Detector": str(det),
            "Azimuth": [ula + ' ' + ura, lla + ' ' + lra],
            "Zenith": [ulz + ' ' + urz, llz + ' ' + lrz]
        })
    # Set Area by resolution (Venus has a single product area; note the
    # negated Y spacing for the north-up convention).
    curArea = Area()
    curArea.origin = (
        f"{originX:.1f}",
        f"{originY:.1f}")
    curArea.spacing = (
        f"{pixSizeX:.1f}",
        f"{-pixSizeY:.1f}")
    curArea.size = (
        str(nbCol),
        str(nbRow))
    self.AreaByResolution = [curArea]
    # Gather spectral information per band (Ak, polarization, central wavelength).
    l_resol = l_BandsDefinitions.ListOfL1Resolution[0]
    l_pathAk = "//List_of_Aks/Ak[@sk='{}']"
    l_pathPolarCoef = "//List_of_Polarization_Coefficients/Polarization_Coefficient[@sk='{}']"
    l_pathWavelenghCentral = "//List_of_Band_Central_Wavelength/Band_Central_Wavelength[@sk='{}']"
    self.SpectralInfo = []
    for b, bidx in l_BandsDefinitions.L1BandMap.items():
        bcode = l_BandsDefinitions.L1ListOfBandsMap[l_resol][bidx]
        self.SpectralInfo.append({
            "Band": b.replace("B0", "B"),
            "Ak": xml_tools.get_xml_string_value(rootNode, l_pathAk.format(bcode)),
            "PolarizationCoefficient": xml_tools.get_xml_string_value(rootNode, l_pathPolarCoef.format(bcode)),
            "WavelengthCentral": xml_tools.get_xml_string_value(rootNode, l_pathWavelenghCentral.format(bcode))
        })
    # 4.2: New
    # Set the L1 no data value
    self.L1NoData = self.HeaderHandler.get_no_data_value_as_int()
    # Set the reflectance quantification value
    self.ReflectanceQuantification = self.HeaderHandler.get_reflectance_quantification_value()
    # Computes the real value of the L1 NoData
    self.RealL1NoData = self.L1NoData * self.ReflectanceQuantification
    return True
def read(self, product_info, app_handler, l2comm, dem, pReadL1Mode): """product_info,plugin, l2comm,mode :param product_info: L1ImageInformationsBase :param pReadL1Mode: ReadL1ModeType :return: """ LOGGER.debug("Start Venus L1 ImageFileReader ...") product_filename = product_info.HeaderFilename LOGGER.debug("Start Venus L1 ImageFileReader with the filename: " + product_filename) self._plugin.initialize(app_handler) self.headerHandler = product_info.HeaderHandler self._dem = dem # working_dir = get_working_directory("L1Read_", app_handler.get_working_directory()) working_dir = app_handler.get_directory_manager( ).get_temporary_directory("L1Read_", do_always_remove=True) self.ReadL1Mode = pReadL1Mode self._GIPPL2COMMHandler = l2comm l_BandsDefinitions = self._plugin.BandsDefinitions # -------------------------------------- # Initialize the Image filename provider l_FilenameProvider = VenusL1ImageFilenames() #product_info.FilenamesProvider IsValidSatellite = (l_FilenameProvider.initialize(product_filename) is not False) if not IsValidSatellite: raise MajaPluginVenusException( "The file <{}> is not a valid Venus L1 product.".format( product_filename)) # ********************************************************************************************************* # Register the Header o the Input image file # ********************************************************************************************************* LOGGER.debug("Load the Venus L1 Header file : '" + product_filename + "'") l_L1XMLHandler = HeaderImageEarthExplorerXMLFileHandler( product_filename) # ********************************************************************************************************* # TOA Reader connection # ********************************************************************************************************* l_L1NoData = l_L1XMLHandler.get_no_data_value_as_double() l_ReflectanceQuantificationValue = l_L1XMLHandler.get_reflectance_quantification_value( ) l_reflectanceMultiplicationValues 
= [] if xml_tools.as_bool(l2comm.get_value("CalAdjustOption")): l_factor = xml_tools.as_float_list( l2comm.get_value("CalAdjustFactor")) if len(l_factor) != (VenusL1ImageFileReader.TOALastChannel - VenusL1ImageFileReader.TOAFirstChannel + 1): raise MajaPluginVenusException( "Not the same number of Calibration coeffs than L1 bands") for i in range(0, len(l_factor)): l_reflectanceMultiplicationValues.append( l_ReflectanceQuantificationValue * l_factor[i]) else: for i in range(VenusL1ImageFileReader.TOALastChannel - VenusL1ImageFileReader.TOAFirstChannel + 1): l_reflectanceMultiplicationValues.append( l_ReflectanceQuantificationValue) l_RealL1NoData = l_L1NoData * l_ReflectanceQuantificationValue # ********************************************************************************************************* # L1 TOA image pipeline connection # ********************************************************************************************************* if not l_FilenameProvider.m_TOAImageFileName: raise MajaPluginVenusException( "VenusL1ImageFileReader : The TOA image does not exist !") tmp_l1toa_roi = os.path.join(working_dir, "l1toa_roi.tif") app_l1_toa_roi = extract_roi(l_FilenameProvider.m_TOAImageFileName, [ channel for channel in range(VenusL1ImageFileReader.TOALastChannel - VenusL1ImageFileReader.TOAFirstChannel + 1) ], tmp_l1toa_roi, write_output=False) self._l1toa_pipeline.add_otb_app(app_l1_toa_roi) tmp_sat_roi = os.path.join(working_dir, "sat_roi.tif") app_sat_roi = extract_roi(l_FilenameProvider.m_TOAImageFileName, [VenusL1ImageFileReader.SATChannel - 1], tmp_sat_roi, write_output=False) self._sat_pipeline.add_otb_app(app_sat_roi) #Multiply scalar by quantif tmp_l1toa_mbs = os.path.join(working_dir, "l1toa.tif") app_l1toa_mbs = multiply_by_scalar( app_l1_toa_roi.getoutput().get("out"), l_ReflectanceQuantificationValue, output_image=tmp_l1toa_mbs, write_output=False) self._l1toa_pipeline.add_otb_app(app_l1toa_mbs) # update all extract ROI in once write_images([ 
app_l1toa_mbs.getoutput().get("out"), app_sat_roi.getoutput().get("out") ], [tmp_l1toa_mbs, tmp_sat_roi]) self._toascalar = tmp_l1toa_mbs # ********************************************************************************************************* # L1 PIX image pipeline connection # ********************************************************************************************************* tmp_l1pix_roi = os.path.join(working_dir, "l1pix.tif") app_l1_pix_roi = extract_roi(l_FilenameProvider.m_TOAImageFileName, [VenusL1ImageFileReader.PIXChannel - 1], tmp_l1pix_roi + ":uint16", write_output=False) self._l1pix_pipeline.add_otb_app(app_l1_pix_roi) self._l1pix = app_l1_pix_roi.getoutput().get("out") # ********************************************************************************************************* # START READ L1 for ALGORITHMS # ********************************************************************************************************* if pReadL1Mode == ReadL1Mode.READ_L1_MODE_FOR_ALGORITHMS: # ********************************************************************************************************* # L2 PIX image pipeline connection # ********************************************************************************************************* # LAIG-FA-MAC-131720-CS : New for 4.2 # Before resample, binarytovector -> resample -> vectortobinary tmp_l2pix_bin2vec = os.path.join(working_dir, "l2pix_bin2vec.tif") param_l2pix_bin2vec = { "im": app_l1_pix_roi.getoutput().get("out"), "out": tmp_l2pix_bin2vec + ":uint8", "nbcomp": VenusL1ImageFileReader.PIXNumberOfComponentsPerPixel } app_l2pix_bin2vec = OtbAppHandler("BinaryToVector", param_l2pix_bin2vec, write_output=False) self._l2pix_pipeline.add_otb_app(app_l2pix_bin2vec) tmp_l2pix_resample = os.path.join(working_dir, "l2pix_resample.tif") app_l2pix_resample = resample( app_l2pix_bin2vec.getoutput().get("out"), self._dem.ALTList[0], tmp_l2pix_resample, OtbResampleType.LINEAR_WITH_RADIUS, padradius=4.0, threshold=0.0, 
write_output=False) self._l2pix_pipeline.add_otb_app(app_l2pix_resample) #L2 PIX is concatenate tmp_l2pix_binconcat = os.path.join(working_dir, "l2pix.tif") param_l2pix_binconcat = { "im": app_l2pix_resample.getoutput().get("out"), "out": tmp_l2pix_binconcat + ":uint16" } app_l2pix_binconcat = OtbAppHandler("BinaryConcatenate", param_l2pix_binconcat, write_output=False) self._l2pix = app_l2pix_binconcat.getoutput().get("out") self._l2pix_pipeline.add_otb_app(app_l2pix_binconcat) # ********************************************************************************************************* # L2 EDG image pipeline connection # ********************************************************************************************************* tmp_edg_thresholder = os.path.join(working_dir, "edg_thresholder1.tif") param_edg_thresholder1 = { "im": self._toascalar, "thresholdvalue": l_RealL1NoData, "equalvalue": 255, "outsidevalue": 0, "out": tmp_edg_thresholder + ":uint8" } app_edg_thresholder1 = OtbAppHandler("OneBandEqualThreshold", param_edg_thresholder1, write_output=True) self._edg_pipeline.add_otb_app(app_edg_thresholder1) tmp_edg_resample = os.path.join(working_dir, "edg_resample.tif") app_edg_resample = resample( app_edg_thresholder1.getoutput().get("out"), self._dem.ALTList[0], tmp_edg_resample, OtbResampleType.BCO, padradius=4.0, write_output=True) self._edg_pipeline.add_otb_app(app_edg_resample) # Threshold the output out_sub_edg = os.path.join(working_dir, "edg_thresholder2.tif") param_edg_thresholder2 = { "im": app_edg_resample.getoutput().get("out"), "thresholdvalue": 0, "equalvalue": 1, "outsidevalue": 0, "out": out_sub_edg + ":uint8" } app_edg_thresholder2 = OtbAppHandler("OneBandEqualThreshold", param_edg_thresholder2, write_output=True) self._edg_pipeline.add_otb_app(app_edg_thresholder2) # ********************************************************************************************************* # IPEDGSub image pipeline connection # 
********************************************************************************************************* tmp_edgsub_resample = os.path.join(working_dir, "edgsub_resample.tif") app_edgsub_resample = resample( app_edg_thresholder1.getoutput().get("out"), self._dem.ALC, tmp_edgsub_resample, OtbResampleType.LINEAR_WITH_RADIUS, padradius=12.0, write_output=True) self._edg_pipeline.add_otb_app(app_edgsub_resample) # Threshold the output out_sub_edgsub = os.path.join(working_dir, "edgsub.tif") param_edgsub_thresholder2 = { "im": app_edgsub_resample.getoutput().get("out"), "thresholdvalue": 0, "equalvalue": 0, "outsidevalue": 1, "out": out_sub_edgsub + ":uint8" } app_edgsub_thresholder2 = OtbAppHandler("OneBandEqualThreshold", param_edgsub_thresholder2, write_output=True) self._edgsub = app_edgsub_thresholder2.getoutput().get("out") self._edg_pipeline.add_otb_app(app_edgsub_thresholder2) # ********************************************************************************************************* # L2 TOA image pipeline connection # ********************************************************************************************************* tmp_l2toa_resample = os.path.join(working_dir, "l2toa_resample.tif") app_l2toa_resample = resample(self._toascalar, self._dem.ALTList[0], tmp_l2toa_resample, OtbResampleType.BCO, padradius=4.0, write_output=False) self._l2toa_pipeline.add_otb_app(app_l2toa_resample) l2toa_list = [] l_toathresholdminvalue = 0 l_toathresholvalue = -10 #Apply EDG mask on l2toa resampled tmp_l2toa = os.path.join(working_dir, "l2toa.tif") app_l2toa = apply_mask(app_l2toa_resample.getoutput().get("out"), app_edg_thresholder2.getoutput().get("out"), l_toathresholvalue, tmp_l2toa, write_output=False) self._l2toa = app_l2toa.getoutput().get("out") self._l2toa_pipeline.add_otb_app(app_l2toa) # ********************************************************************************************************* # TOA Sub image pipeline connection # 
********************************************************************************************************* tmp_toasub_resample = os.path.join(working_dir, "toasub_resample.tif") app_toasub_resample = resample(self._toascalar, self._dem.ALC, tmp_toasub_resample, OtbResampleType.LINEAR_WITH_RADIUS, padradius=4.0, write_output=True) self._l2toa_pipeline.add_otb_app(app_toasub_resample) # Threshold the output out_edgsub_threshold = os.path.join(working_dir, "edgsubthreshold.tif") param_edgsub_threshold = { "im": app_edgsub_resample.getoutput().get("out"), "thresholdvalue": 0, "equalvalue": 1, "outsidevalue": 0, "out": out_edgsub_threshold + ":uint8" } app_edgsub_threshold = OtbAppHandler("OneBandEqualThreshold", param_edgsub_threshold, write_output=True) self._edg_pipeline.add_otb_app(app_edgsub_threshold) tmp_l2subtoa = os.path.join(working_dir, "toasub.tif") app_l2subtoa = apply_mask( app_toasub_resample.getoutput().get("out"), app_edgsub_threshold.getoutput().get("out"), l_toathresholvalue, tmp_l2subtoa, write_output=True) self._toasub = app_l2subtoa.getoutput().get("out") self._l2toa_pipeline.add_otb_app(app_l2subtoa) # ********************************************************************************************************* # L2EDG - Actualization of the L2 edge mask # ********************************************************************************************************* #tmp_l2edg_threshold = os.path.join(working_dir, "l2edg_threshold.tif") #app_l2edg_threshold = binary_threshold(self._edgsub, # lower_threshold=0, # inside_value=1000, # outside_value=0, # output_image=tmp_l2edg_threshold + ":uint8", # write_output=True) #self._l2edg_pipeline.add_otb_app(app_l2edg_threshold) tmp_l2edg_resample = os.path.join(working_dir, "l2edg.tif") app_l2edg_resample = resample(self._edgsub, self._dem.ALTList[0], tmp_l2edg_resample + ":uint8", OtbResampleType.LINEAR, padradius=4.0, threshold=0.001, write_output=True) self._l2edg = app_l2edg_resample.getoutput().get("out") 
self._l2edg_pipeline.add_otb_app(app_l2edg_resample) # ********************************************************************************************************* # SAT image pipeline connection # ********************************************************************************************************* tmp_sat_bin2vec = os.path.join(working_dir, "sat_bin2vec.tif") param_sat_bin2vec = { "im": tmp_sat_roi, "out": tmp_sat_bin2vec + ":uint8", "nbcomp": VenusL1ImageFileReader.SATNumberOfComponentsPerPixel } app_sat_bin2vec = OtbAppHandler("BinaryToVector", param_sat_bin2vec, write_output=False) self._sat_pipeline.add_otb_app(app_sat_bin2vec) l_l2sat_thresholdvalue = l2comm.get_value_f("SaturationThreshold") tmp_sat_resample = os.path.join(working_dir, "l2sat.tif") app_sat_resample = resample(app_sat_bin2vec.getoutput().get("out"), self._dem.ALTList[0], tmp_sat_resample + ":uint8", OtbResampleType.BCO, padradius=4.0, threshold=l_l2sat_thresholdvalue, write_output=False) self._l2sat = app_sat_resample.getoutput().get("out") self._sat_pipeline.add_otb_app(app_sat_resample) # ********************************************************************************************************* # IPSAT Sub image pipeline connection # ********************************************************************************************************* l_sat_subthresholdvalue = l2comm.get_value_f( "SaturationThresholdSub") tmp_satsub_resample = os.path.join(working_dir, "satsub.tif") app_satsub_resample = resample( app_sat_bin2vec.getoutput().get("out"), self._dem.ALC, tmp_satsub_resample + ":uint8", OtbResampleType.LINEAR_WITH_RADIUS, padradius=4.0, threshold=l_sat_subthresholdvalue) self._satsub = app_satsub_resample.getoutput().get("out") self._sat_pipeline.add_otb_app(app_satsub_resample) # ********************************************************************************************************* # CLA image pipeline connection # 
********************************************************************************************************* LOGGER.debug( "VenusL1ImageFileReader::Initialize - CLA image filename: '" + l_FilenameProvider.m_CLAImageFileName + "'") if not l_FilenameProvider.m_CLAImageFileName: raise MajaPluginVenusException( "The CLA image does not exist !! ") self._cla = l_FilenameProvider.m_CLAImageFileName # ********************************************************************************************************* # SOL1 image pipeline connection # ********************************************************************************************************* LOGGER.debug( "VenusL1ImageFileReader::Initialize - SOL image filename: '" + l_FilenameProvider.m_SOLImageFileName + "'") if not l_FilenameProvider.m_SOLImageFileName: raise MajaPluginVenusException( "The SOL image does not exist !! ") mtdat = GdalDatasetInfo(l_FilenameProvider.m_TOAImageFileName) toaarea = Area() toaarea.size = mtdat.size toaarea.origin = mtdat.origin toaarea.spacing = mtdat.pixel_size l_SOLHeaderHandler = HeaderImageEarthExplorerXMLFileHandler( l_FilenameProvider.m_SOLHeaderFileName) l_L1SOLSubsamplingFactor = l_SOLHeaderHandler.get_sampling_factor() LOGGER.debug(l_L1SOLSubsamplingFactor) # SOL1 tmp_sol1_b1 = os.path.join(working_dir, "sol1_B1.tif") app_sol1_b1 = multiply_by_scalar( l_FilenameProvider.m_SOLImageFileName + VenusL1ImageFileReader.SOL1ChannelB1, toaarea.spacing[0], tmp_sol1_b1, write_output=False) self._sol_pipeline.add_otb_app(app_sol1_b1) tmp_sol1_b2 = os.path.join(working_dir, "sol1_B2.tif") app_sol1_b2 = multiply_by_scalar( l_FilenameProvider.m_SOLImageFileName + VenusL1ImageFileReader.SOL1ChannelB2, (-1) * toaarea.spacing[1], tmp_sol1_b2, write_output=False) self._sol_pipeline.add_otb_app(app_sol1_b2) tmp_sol1_concat = os.path.join(working_dir, "sol1_concat.tif") param_sol1_concat = { "il": [ app_sol1_b1.getoutput().get("out"), app_sol1_b2.getoutput().get("out") ], "out": tmp_sol1_concat } 
app_sol1_concat = OtbAppHandler("ConcatenateDoubleImages", param_sol1_concat) update_projection(l_FilenameProvider.m_TOAImageFileName, app_sol1_concat.getoutput().get("out"), l_L1SOLSubsamplingFactor) self._sol_pipeline.add_otb_app(app_sol1_concat) tmp_sol1_resample = os.path.join(working_dir, "sol1.tif") app_sol1_resample = resample( app_sol1_concat.getoutput().get("out"), self._dem.ALC, tmp_sol1_resample, OtbResampleType.LINEAR, padradius=4.0) self._sol1 = app_sol1_resample.getoutput().get("out") self._sol_pipeline.add_otb_app(app_sol1_resample) # SOL2 tmp_sol2_b1 = os.path.join(working_dir, "sol2_B1.tif") app_sol2_b1 = multiply_by_scalar( l_FilenameProvider.m_SOLImageFileName + VenusL1ImageFileReader.SOL2ChannelB1, toaarea.spacing[0], tmp_sol2_b1, write_output=False) self._sol_pipeline.add_otb_app(app_sol2_b1) tmp_sol2_b2 = os.path.join(working_dir, "sol2_B2.tif") app_sol2_b2 = multiply_by_scalar( l_FilenameProvider.m_SOLImageFileName + VenusL1ImageFileReader.SOL2ChannelB2, (-1) * toaarea.spacing[1], tmp_sol2_b2, write_output=False) self._sol_pipeline.add_otb_app(app_sol2_b2) tmp_sol2_concat = os.path.join(working_dir, "sol2_concat.tif") param_sol2_concat = { "il": [ app_sol2_b1.getoutput().get("out"), app_sol2_b2.getoutput().get("out") ], "out": tmp_sol2_concat } app_sol2_concat = OtbAppHandler("ConcatenateDoubleImages", param_sol2_concat) update_projection(l_FilenameProvider.m_TOAImageFileName, app_sol2_concat.getoutput().get("out"), l_L1SOLSubsamplingFactor) self._sol_pipeline.add_otb_app(app_sol2_concat) tmp_sol2_resample = os.path.join(working_dir, "sol2.tif") app_sol2_resample = resample( app_sol2_concat.getoutput().get("out"), self._dem.ALC, tmp_sol2_resample, OtbResampleType.LINEAR, padradius=4.0) self._sol2 = app_sol2_resample.getoutput().get("out") self._sol_pipeline.add_otb_app(app_sol2_resample) # ********************************************************************************************************* # DTMVIE image pipeline connection # 
********************************************************************************************************* LOGGER.debug( "VenusL1ImageFileReader::Initialize - VIE image filename: '" + l_FilenameProvider.m_VIEImageFileName + "'") l_VIEHeaderHandler = HeaderImageEarthExplorerXMLFileHandler( l_FilenameProvider.m_VIEHeaderFileName) l_L1VIESubsamplingFactor = l_VIEHeaderHandler.get_sampling_factor() LOGGER.debug(l_L1VIESubsamplingFactor) tmp_vieb5b1_mult = os.path.join(working_dir, "vie5b1_mult.tif") app_vieb5b1_mult = multiply_by_scalar( l_FilenameProvider.m_VIEImageFileName + VenusL1ImageFileReader.VIEB5ChannelB1, toaarea.spacing[0], tmp_vieb5b1_mult, write_output=False) self._dtmvie_pipeline.add_otb_app(app_vieb5b1_mult) tmp_vieb5b2_mult = os.path.join(working_dir, "vie5b2_mult.tif") app_vieb5b2_mult = multiply_by_scalar( l_FilenameProvider.m_VIEImageFileName + VenusL1ImageFileReader.VIEB5ChannelB2, (-1) * toaarea.spacing[1], tmp_vieb5b2_mult, write_output=False) self._dtmvie_pipeline.add_otb_app(app_vieb5b2_mult) tmp_vieb6b1_mult = os.path.join(working_dir, "vie6b1_mult.tif") app_vieb6b1_mult = multiply_by_scalar( l_FilenameProvider.m_VIEImageFileName + VenusL1ImageFileReader.VIEB6ChannelB1, toaarea.spacing[0], tmp_vieb6b1_mult, write_output=False) self._dtmvie_pipeline.add_otb_app(app_vieb6b1_mult) tmp_vieb6b2_mult = os.path.join(working_dir, "vie6b2_mult.tif") app_vieb6b2_mult = multiply_by_scalar( l_FilenameProvider.m_VIEImageFileName + VenusL1ImageFileReader.VIEB6ChannelB2, (-1) * toaarea.spacing[1], tmp_vieb6b2_mult, write_output=False) self._dtmvie_pipeline.add_otb_app(app_vieb6b2_mult) tmp_dtmvie_concat = os.path.join(working_dir, "dtmvie_concat.tif") param_dtmvie_concat = { "il": [ app_vieb5b1_mult.getoutput().get("out"), app_vieb5b2_mult.getoutput().get("out"), app_vieb6b1_mult.getoutput().get("out"), app_vieb6b2_mult.getoutput().get("out") ], "out": tmp_dtmvie_concat } app_dtmvie_concat = OtbAppHandler("ConcatenateDoubleImages", param_dtmvie_concat) 
update_projection(l_FilenameProvider.m_TOAImageFileName, app_dtmvie_concat.getoutput().get("out"), l_L1VIESubsamplingFactor) self._dtmvie_pipeline.add_otb_app(app_dtmvie_concat) tmp_dtmvie_resample = os.path.join(working_dir, "dtmvie.tif") app_dtmvie_resample = resample( app_dtmvie_concat.getoutput().get("out"), self._dem.ALC, tmp_dtmvie_resample, OtbResampleType.LINEAR, padradius=4.0) self._dtmvie = app_dtmvie_resample.getoutput().get("out") self._dtmvie_pipeline.add_otb_app(app_dtmvie_resample) # ********************************************************************************************************* # VIE image pipeline connection # ********************************************************************************************************* tmp_shadowvie_concat = os.path.join(working_dir, "shadowvie_concat.tif") param_shadowvie_concat = { "il": [ app_vieb5b1_mult.getoutput().get("out"), app_vieb5b2_mult.getoutput().get("out") ], "out": tmp_shadowvie_concat } app_shadowvie_concat = OtbAppHandler("ConcatenateDoubleImages", param_shadowvie_concat) self._shadowvie_pipeline.add_otb_app(app_shadowvie_concat) update_projection(l_FilenameProvider.m_TOAImageFileName, app_shadowvie_concat.getoutput().get("out"), l_L1VIESubsamplingFactor) tmp_shadowvie_resample = os.path.join(working_dir, "shadowvie.tif") app_shadowvie_resample = resample( app_shadowvie_concat.getoutput().get("out"), self._dem.ALC, tmp_shadowvie_resample, OtbResampleType.LINEAR, padradius=4.0) self._shadowvie = app_shadowvie_resample.getoutput().get("out") self._shadowvie_pipeline.add_otb_app(app_shadowvie_resample) # Fill the datas self.dict_of_vals["IPEDGSubOutput"] = self._edgsub self.dict_of_vals["SOL1Image"] = self._sol1 self.dict_of_vals["SOL2Image"] = self._sol2 self.dict_of_vals["DTMVIEImage"] = self._dtmvie self.dict_of_vals["IPTOASubOutput"] = self._toasub self.dict_of_vals["L2TOAImageList"] = [self._l2toa] self.dict_of_vals["ViewingZenithMeanMap"] = self._meanZenithMap 
self.dict_of_vals["ViewingAzimuthMeanMap"] = self._meanAzimuthMap self.dict_of_vals["CLAImage"] = self._cla self.dict_of_vals["IPSATSubOutput"] = self._satsub self.dict_of_vals["ShadowVIEImage"] = self._shadowvie if self._plugin.CirrusMasking: l_CirrusBandCode = l2comm.get_value("CirrusBandCode") l_CirrusBandIdx = self._plugin.BandsDefinitions.get_band_id_in_l2_coarse( l_CirrusBandCode) tmp = os.path.join(working_dir, "l1toacirrus.tif") app = extract_roi(self._toascalar, [l_CirrusBandIdx - 1], tmp) self.dict_of_vals["L1TOACirrusImage"] = app.getoutput().get( "out") self.dict_of_vals["L2EDGOutputList"] = [self._l2edg] self.dict_of_vals["L2SATImageList"] = [self._l2sat] self.dict_of_vals["L2PIXImageList"] = [self._l2pix] self.dict_of_vals["L1PIXImageList"] = [self._l1pix] self.dict_of_vals["L1TOAImageList"] = [self._toascalar]
def muscate_initialize(self, product_filename: str, plugin_base, validate: bool = False, schema_path=None) -> bool:
    """Initialize this L1 image-information object from a MUSCATE-format XML header.

    Parses the MUSCATE L1 header and populates product identification
    (satellite, site, dates, product id/version), geometric metadata
    (center corner, viewing/solar angles, per-resolution areas),
    spectral-band information, and the ``self.MuscateData`` dictionary of
    XML nodes to be propagated into the L2 product.

    :param product_filename: path to the candidate MUSCATE L1 XML header file.
    :param plugin_base: plugin descriptor; this method reads its
        ``PluginName``, ``ShortFileType``, ``BandsDefinitions`` and
        ``WideFieldSensor`` attributes.
    :param validate: forwarded to :class:`MuscateXMLFileHandler` (XML
        validation toggle).
    :param schema_path: forwarded to :class:`MuscateXMLFileHandler`
        (schema location used when validating).
    :return: ``False`` if the file is not recognized as a MUSCATE L1 header
        (by filename or by the platform recorded in the XML); ``True`` once
        all metadata has been extracted.
    """
    # Initialize the Image filename provider.
    # Two cheap rejection tests before any real parsing: filename pattern,
    # then the platform declared inside the XML.
    if not MajaMuscateL1ImageInformations.is_a_muscate_by_checking_the_filename(
            product_filename):
        LOGGER.debug("The filename <" + product_filename +
                     "> is not an 'muscate' L1 header file.")
        return False
    if not MajaMuscateL1ImageInformations.is_a_muscate_by_checking_the_satellite(
            product_filename, plugin_base):
        LOGGER.debug(
            "The filename <" + product_filename +
            "> is not an 'muscate' L1 header file (by reading platform in the xml file)."
        )
        return False
    # Init XML handler
    lHandler = MuscateXMLFileHandler(product_filename,
                                     validate=validate,
                                     schema_path=schema_path)
    # Store the satellite
    self.Satellite = lHandler.get_string_value_of("Platform")
    # SatelliteID is the platform name upper-cased with dashes removed
    # (e.g. "VENUS-XS" -> "VENUSXS").
    self.SatelliteID = self.Satellite.upper().replace("-", "")
    # Store the plugin name
    self.PluginName = plugin_base.PluginName
    self.ProductFileName = product_filename
    self.FileCategory = plugin_base.ShortFileType  # LSC
    self.LevelType = "L1VALD"
    self.Prefix = self.Satellite
    self.FileClass = "TEST"
    # LANDSAT5-TM-XSTH...
    self.Site = lHandler.get_string_value_of("ZoneGeo")
    self.ProductDateStr = lHandler.get_acquisition_date_formated_yyyymmdd(
    )  # YYYYMMDD
    # LANDSAT5-TM-XSTH_20100118-103000-000_L1C_EU93066200A00B_C_V1-0
    self.ProductId = lHandler.get_string_value_of("ProductId")
    self.ProductVersion = lHandler.get_string_value_of("ProductVersion")
    l_DatePDV = lHandler.get_date_pdv_formated_utc(
    )  # UTC=2010-01-18T12:00:00
    self.ProductDate = date_utils.get_datetime_from_utc(l_DatePDV)
    self.GenerationDateStr = lHandler.get_string_value_of("ProductionDate")
    self.AcquisitionStart = lHandler.get_string_value_of("AcquisitionDate")
    self.OrbitNumber = lHandler.get_string_value_of("OrbitNumber")
    self.ReferenceSiteDefinitionId = "UNKNOWN"
    self.HeaderFilename = product_filename
    self.HeaderHandler = lHandler
    # Optional header fields: only read them when the XML node exists
    # (check=True makes get_only_value return None instead of raising).
    if xml_tools.get_only_value(lHandler.root,
                                "//Product_Characteristics/INSTRUMENT",
                                check=True) is not None:
        self.Instrument = lHandler.get_string_value_of("Instrument")
    if xml_tools.get_only_value(
            lHandler.root,
            "//Product_Characteristics/SPECTRAL_CONTENT",
            check=True) is not None:
        self.SpectralContent = lHandler.get_string_value_of(
            "SpectralContent").replace("+", "")
    # VENUS specification
    # Store the VIE and SOL filenames (DATA and Headers) to copy in the L2 product
    self.SOLHeaderFileName = ""
    self.SOLImageFileName = ""
    self.VIEHeaderFileName = ""
    self.VIEImageFileName = ""
    # Initialize the parameters necessary for the core of the algorithms of MACCS
    # Get longitude and latitude coordinates of the product
    # NOTE(review): ulc/lrc are only consumed by the commented-out centre
    # computation below; the centre now comes from get_center() directly.
    ulc = lHandler.get_upper_left_corner()
    lrc = lHandler.get_lower_right_corner()
    # Estimation of the coordinate of the central point
    center = lHandler.get_center()
    #l_Corner.Longitude = ulc[0] + (lrc[0] - ulc[0]) / 2.
    #l_Corner.Latitude = ulc[1] + (lrc[1] - ulc[1]) / 2.
    self.CenterCorner.longitude = center[0]
    self.CenterCorner.latitude = center[1]
    self.CenterCorner.column = 0
    self.CenterCorner.line = 0
    # Initialize the Validity Start/Stop
    l_UTCValidity = date_utils.get_utc_from_datetime(self.ProductDate)
    # Strip a trailing ".000Z" so validity strings carry no sub-second part.
    if l_UTCValidity.endswith('.000Z'):
        l_UTCValidity = l_UTCValidity[:-5]
    self.UTCValidityStart = l_UTCValidity
    self.UTCValidityStop = l_UTCValidity
    # Get the list of bands in a L2 product
    l_BandsDefinitions = plugin_base.BandsDefinitions
    l_ListOfBandsL2Coarse = l_BandsDefinitions.get_list_of_band_id_in_l2_coarse(
    )
    l_NbBandsL2Coarse = len(l_ListOfBandsL2Coarse)
    LOGGER.debug(
        "l_BandsDefinitions->GetListOfBandCodeInL2Coarse -> l_NbBandsL2Coarse: "
        + str(l_NbBandsL2Coarse))
    LOGGER.debug("MuscateXmllHandler->GetListOfBands -> l_NbBands : " +
                 str(len(lHandler.get_list_of_bands())))
    self.ListOfViewingAnglesPerBandAtL2Resolution = []
    self.ListOfViewingAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution = []
    self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution = []
    # Initialize the Viewing angles for each detectors (Zenith and Azimuth)
    # Read the constants values from the Header Envi file name
    # The angles must be in degree
    if plugin_base.WideFieldSensor:
        # Wide-field sensor: the header carries one mean viewing angle pair
        # per L2-coarse band; the product-level angle is their mean.
        # Initialize the Viewing angles for each detectors (Zenith and Azimuth)
        l_MeanViewingZenithalAngles = lHandler.get_mean_viewing_zenithal_angles(
        )
        l_MeanViewingAzimuthalAngles = lHandler.get_mean_viewing_azimuthal_angles(
        )
        self.ViewingAngle = {
            "incidence_zenith_angle":
            str(statistics.mean(l_MeanViewingZenithalAngles)),
            "incidence_azimuth_angle":
            str(statistics.mean(l_MeanViewingAzimuthalAngles))
        }
        # For each bands for EnviProduct
        # NOTE(review): assumes the per-band angle lists are ordered like
        # l_ListOfBandsL2Coarse (indexed by bd) — TODO confirm in the handler.
        for bd in range(0, len(l_ListOfBandsL2Coarse)):
            angles = {
                "incidence_zenith_angle":
                str(l_MeanViewingZenithalAngles[bd]),
                "incidence_azimuth_angle":
                str(l_MeanViewingAzimuthalAngles[bd])
            }
            self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(
                angles)
            self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_zenith_angle"]))
            self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_azimuth_angle"]))
    else:
        # Narrow-field sensor: a single mean viewing-angle pair is
        # replicated for every L2-coarse band.
        lViewingAngles = lHandler.get_mean_viewing_angles()
        l_ViewAngleZenith = lViewingAngles[0]
        l_ViewAngleAzimuth = lViewingAngles[1]
        self.ViewingAngle = {
            "incidence_zenith_angle": str(l_ViewAngleZenith),
            "incidence_azimuth_angle": str(l_ViewAngleAzimuth)
        }
        # For each bands for EnviProduct
        for bd in l_ListOfBandsL2Coarse:
            angles = {
                "incidence_zenith_angle": str(l_ViewAngleZenith),
                "incidence_azimuth_angle": str(l_ViewAngleAzimuth)
            }
            self.ListOfViewingAnglesPerBandAtL2CoarseResolution.append(
                angles)
            self.ListOfViewingZenithAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_zenith_angle"]))
            self.ListOfViewingAzimuthAnglesPerBandAtL2CoarseResolution.append(
                str(angles["incidence_azimuth_angle"]))
    # Fill the L2 resolution angles
    # (re-indexes the coarse-resolution list by each L2 resolution's bands).
    l_nbRes = len(l_BandsDefinitions.ListOfL2Resolution)
    for r in range(0, l_nbRes):
        l_res = l_BandsDefinitions.ListOfL2Resolution[r]
        l_l2bandcodes = l_BandsDefinitions.get_list_of_l2_band_code(l_res)
        l_l2bandidx = [
            l_BandsDefinitions.get_band_id_in_l2_coarse(b)
            for b in l_l2bandcodes
        ]
        for b in l_l2bandidx:
            self.ListOfViewingAnglesPerBandAtL2Resolution.append(
                self.ListOfViewingAnglesPerBandAtL2CoarseResolution[b])
    # Solar Angles
    lSolarAngles = lHandler.get_mean_solar_angles()
    self.SolarAngle = {
        "sun_zenith_angle": lSolarAngles[0],
        "sun_azimuth_angle": lSolarAngles[1]
    }
    # Solar angle grid: only present in headers carrying Sun_Angles_Grids.
    if xml_tools.get_only_value(lHandler.root,
                                "//Angles_Grids_List/Sun_Angles_Grids",
                                check=True) is not None:
        self.SolarAngleGrid["StepUnit"] = xml_tools.get_attribute(
            lHandler.root, "//Angles_Grids_List/Sun_Angles_Grids/Zenith",
            "step_unit")
        self.SolarAngleGrid["ColStep"] = lHandler.get_string_value_of(
            "SunAngleColStep")
        self.SolarAngleGrid["RowStep"] = lHandler.get_string_value_of(
            "SunAngleRowStep")
        self.SolarAngleGrid["Azimuth"] = lHandler.get_sun_azimuthal_angles(
        )
        self.SolarAngleGrid["Zenith"] = lHandler.get_sun_zenithal_angles()
    # Viewing angle grids
    self.ViewingAngleGrids = []
    if len(
            xml_tools.get_all_values(
                lHandler.root,
                "//Angles_Grids_List/Viewing_Incidence_Angles_Grids_List/Band_Viewing_Incidence_Angles_Grids_List"
            )):
        if lHandler.has_per_band_angles():
            # One grid entry per (band, detector) pair.
            for bn, bandid in l_BandsDefinitions.L2CoarseBandMap.items():
                zonelist = lHandler.get_list_of_zones(bn)
                zenith_values = lHandler.get_viewing_zenithal_angles(bn)
                azimuth_values = lHandler.get_viewing_azimuthal_angles(bn)
                LOGGER.debug("Viewing Angle grid for band " + str(bn))
                for d, det in enumerate(zonelist):
                    self.ViewingAngleGrids.append({
                        "StepUnit":
                        lHandler.get_viewing_grid_step_unit(bn, det),
                        "ColStep":
                        lHandler.get_viewing_grid_col_step(bn, det),
                        "RowStep":
                        lHandler.get_viewing_grid_row_step(bn, det),
                        "Band": str(bandid),
                        "Detector": det.lstrip('0'),
                        "Azimuth": azimuth_values[d],
                        "Zenith": zenith_values[d]
                    })
        else:
            # No per-band grids: one entry per detector, no "Band" key.
            zonelist = lHandler.get_list_of_zones("dummy")
            for det in zonelist:
                LOGGER.debug("Viewing Angle grid for det " + det)
                # NOTE(review): here the detector id is passed to
                # get_viewing_azimuthal/zenithal_angles where the per-band
                # branch passes a band name — confirm the handler accepts
                # both argument kinds.
                self.ViewingAngleGrids.append({
                    "StepUnit":
                    lHandler.get_viewing_grid_step_unit(bandid=None,
                                                        det=det),
                    "ColStep":
                    lHandler.get_viewing_grid_col_step(bandid=None,
                                                       det=det),
                    "RowStep":
                    lHandler.get_viewing_grid_row_step(bandid=None,
                                                       det=det),
                    "Detector": det.lstrip('0'),
                    "Azimuth":
                    lHandler.get_viewing_azimuthal_angles(det)[0],
                    "Zenith":
                    lHandler.get_viewing_zenithal_angles(det)[0]
                })
    # Area by resolution
    self.AreaByResolution = []
    l_grpSuffixes = l_BandsDefinitions.ListOfL2Resolution
    # Single-resolution products use the "XS" group id in the header.
    if len(l_grpSuffixes) == 1:
        l_grpSuffixes = ["XS"]
    for res in l_grpSuffixes:
        l_path_group_geo = "//Group_Geopositioning_List/Group_Geopositioning[@group_id='{}']/{}"
        curArea = Area()
        curArea.origin = (xml_tools.get_xml_string_value(
            lHandler.root, l_path_group_geo.format(res, "ULX")),
                          xml_tools.get_xml_string_value(
                              lHandler.root,
                              l_path_group_geo.format(res, "ULY")))
        curArea.spacing = (xml_tools.get_xml_string_value(
            lHandler.root, l_path_group_geo.format(res, "XDIM")),
                           xml_tools.get_xml_string_value(
                               lHandler.root,
                               l_path_group_geo.format(res, "YDIM")))
        curArea.size = (xml_tools.get_xml_string_value(
            lHandler.root, l_path_group_geo.format(res, "NCOLS")),
                        xml_tools.get_xml_string_value(
                            lHandler.root,
                            l_path_group_geo.format(res, "NROWS")))
        self.AreaByResolution.append(curArea)
    # Spectral information
    # Per-band XPath fragments, appended to l_pathBase.format(band).
    self.SpectralInfo = []
    l_pathBase = "//Spectral_Band_Informations_List/Spectral_Band_Informations[@band_id='{}']"
    l_pathNativeCoeff = "/Calibration_Coefficients_Lists/Native_Coefficients_List/COEFFICIENT[@name='{}']"
    l_pathRadiance = "/SOLAR_IRRADIANCE"
    l_pathWavelength = "/Wavelength/{}"
    l_pathResponse = "/Spectral_Response/{}"
    l_pathsSpecInfo = {
        'PhysicalGain': l_pathNativeCoeff.format("PhysicalGain"),
        'LuminanceMax': l_pathNativeCoeff.format("LuminanceMax"),
        'LuminanceMin': l_pathNativeCoeff.format("LuminanceMin"),
        'QuantizeCalMax': l_pathNativeCoeff.format("QuantizeCalMax"),
        'QuantizeCalMin': l_pathNativeCoeff.format("QuantizeCalMin"),
        'RadianceAdd': l_pathNativeCoeff.format("RadianceAdd"),
        'RadianceMult': l_pathNativeCoeff.format("RadianceMult"),
        'ReflectanceAdd': l_pathNativeCoeff.format("ReflectanceAdd"),
        'ReflectanceMult': l_pathNativeCoeff.format("ReflectanceMult"),
        'SolarIrradiance': l_pathRadiance,
        'WavelengthMin': l_pathWavelength.format("MIN"),
        'WavelengthMax': l_pathWavelength.format("MAX"),
        'WavelengthCentral': l_pathWavelength.format("CENTRAL"),
        'ResponseStep': l_pathResponse.format("STEP"),
        'ResponseValues': l_pathResponse.format("VALUES")
    }
    # NOTE(review): bidx is not used inside this loop; only the band name
    # b drives the XPath queries.
    for b, bidx in l_BandsDefinitions.L1BandMap.items():
        specInfo = {"Band": b}
        realBase = l_pathBase.format(b)
        for measure, pathMeasure in l_pathsSpecInfo.items():
            # Missing measures yield an empty string (check=True) and are
            # simply skipped; PhysicalGain alone is stored as a float.
            res = xml_tools.get_xml_string_value(lHandler.root,
                                                 realBase + pathMeasure,
                                                 check=True)
            if len(res):
                specInfo[measure] = res
                if measure == 'PhysicalGain':
                    specInfo[measure] = float(res)
        self.SpectralInfo.append(specInfo)
    # -------------------------------------------------------------------------
    # 4.2: New
    # Set the L1 no data value
    self.L1NoData = int(lHandler.get_string_value_of("L1NoData"))
    # Set the reflectance quantification value
    self.ReflectanceQuantification = 1. / float(
        lHandler.get_string_value_of("QuantificationValue"))
    # Computes the real value of the L1 NoData
    self.RealL1NoData = float(
        self.L1NoData) * self.ReflectanceQuantification
    # Save metadata related to Muscate format
    # Each entry is either a raw XML node (copied verbatim into the L2
    # header later) or a plain string value; optional nodes are guarded.
    self.MuscateData["Node_MetadataFormat"] = xml_tools.extract_nodes(
        lHandler.root, "//Metadata_Identification/METADATA_FORMAT")
    l_NodeOriginalDataDiffuser = xml_tools.extract_nodes(
        lHandler.root, "//ORIGINAL_DATA_DIFFUSER")
    if l_NodeOriginalDataDiffuser is not None:
        self.MuscateData[
            "Node_OriginalDataDiffuser"] = l_NodeOriginalDataDiffuser
    self.MuscateData[
        "Node_Geoposition_Informations"] = xml_tools.extract_nodes(
            lHandler.root, "//Geoposition_Informations")
    self.MuscateData[
        "Node_Geometric_Informations"] = xml_tools.extract_nodes(
            lHandler.root, "//Geometric_Informations")
    #~ self.MuscateData["Identifier"] = lIdent
    self.MuscateData["Authority"] = lHandler.get_string_value_of(
        "Authority")
    self.MuscateData["Producer"] = lHandler.get_string_value_of("Producer")
    self.MuscateData["Project"] = lHandler.get_string_value_of("Project")
    self.MuscateData["ZoneGeo"] = lHandler.get_string_value_of("ZoneGeo")
    #~ self.MuscateData["Platform"] = self.Satellite
    self.MuscateData["AcquisitionDate"] = lHandler.get_string_value_of(
        "AcquisitionDate")
    self.MuscateData[
        "UTCAcquisitionRangeMean"] = lHandler.get_string_value_of(
            "UTCAcquisitionRangeMean")
    self.MuscateData[
        "UTCAcquisitionRangeDatePrecision"] = lHandler.get_string_value_of(
            "UTCAcquisitionRangeDatePrecision")
    l_NodeSolarAnglesGrid = xml_tools.extract_nodes(
        lHandler.root,
        "//Data_List/Data[Data_Properties/NATURE='Solar_Angles_Grid']")
    if l_NodeSolarAnglesGrid is not None:
        self.MuscateData["Node_Solar_Angles_Grid"] = l_NodeSolarAnglesGrid
    l_NodeViewingAnglesGrid = xml_tools.extract_nodes(
        lHandler.root,
        "//Data_List/Data[Data_Properties/NATURE='Viewing_Angles_Grid']")
    if l_NodeViewingAnglesGrid is not None:
        self.MuscateData[
            "Node_Viewing_Angles_Grid"] = l_NodeViewingAnglesGrid
    l_NodeUsefulImageInfoFile = xml_tools.extract_nodes(
        lHandler.root,
        "//Data_List/Data[Data_Properties/NATURE='Useful_Image_Informations_File']"
    )
    if l_NodeUsefulImageInfoFile is not None:
        self.MuscateData[
            "Node_Useful_Image_Informations_File"] = l_NodeUsefulImageInfoFile
    l_NodeUsefulImage = xml_tools.extract_nodes(
        lHandler.root,
        "//Mask_List/Mask[Mask_Properties/NATURE='Useful_Image']")
    if l_NodeUsefulImage is not None:
        self.MuscateData["Node_Useful_Image"] = l_NodeUsefulImage
    l_NodeDetFoo = xml_tools.get_only_value(
        lHandler.root,
        "//Mask_List/Mask/Mask_Properties/NATURE[.='Detector_Footprint']",
        check=True)
    if l_NodeDetFoo is not None:
        self.MuscateData[
            "ZoneMaskFileNames"] = lHandler.get_map_list_of_detector_footprint_image_filenames(
            )
    pix_node = xml_tools.get_only_value(
        lHandler.root,
        "//Mask_List/Mask/Mask_Properties/NATURE[.='Aberrant_Pixels']",
        check=True)
    if pix_node is not None:
        self.MuscateData[
            "PIXImages"] = lHandler.get_list_of_pix_mask_filenames()
        self.MuscateData[
            "PIXIndices"] = lHandler.get_list_of_pix_mask_indices()
    spectral_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Radiometric_Informations/Spectral_Band_Informations_List")
    if spectral_node is not None:
        self.MuscateData[
            "Node_Spectral_Band_Informations_List"] = spectral_node
    qualityGeo_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Current_Product/Product_Quality_List[@level='Geo']")
    if qualityGeo_node is not None:
        self.MuscateData["Node_Product_Quality_List_Geo"] = qualityGeo_node
    qualityNatif_node = xml_tools.extract_nodes(
        lHandler.root,
        "//Current_Product/Product_Quality_List[@level='Natif']")
    if qualityNatif_node is not None:
        self.MuscateData[
            "Node_Product_Quality_List_Natif"] = qualityNatif_node
    processingJob_node = xml_tools.extract_nodes(
        lHandler.root, "//Production_Informations/Processing_Jobs_List")
    if processingJob_node is not None:
        self.MuscateData["Node_Processing_Jobs_List"] = processingJob_node
    return True