def __init__(self, node):
    """Read one per-band / per-detector viewing incidence angle grid node.

    Extracts the band and detector ids, the zenith/azimuth grid value nodes
    and the common COL_STEP/ROW_STEP/step-unit, checking that the zenith and
    azimuth grids share a single, identical sampling step.
    """
    self.band_id = str(get_only_value(node, BAND_ID))
    self.detector_id = str(get_only_value(node, DETECTOR_ID))
    LOGGER.debug("self.band id : %s:%s", self.band_id, type(self.band_id))
    LOGGER.debug("self.detectorid %s:%s", self.detector_id, type(self.detector_id))
    self.zenith_values = node.xpath(GRID_ZENITH)
    self.azimuth_values = node.xpath(GRID_AZIMUTH)
    # Get ROW_STEP and COL_STEP
    all_col_step = node.xpath(COL_STEP)
    all_row_step = node.xpath(ROW_STEP)
    all_step_unit = node.xpath(STEP_UNIT)
    LOGGER.debug("%s, %s", all_col_step, all_row_step)
    LOGGER.debug(set(all_row_step))
    # Each of the three step lists must contain one unique value; the checks
    # run in the original order so the first failing one raises.
    for step_values, error_message in (
            (all_col_step, "COL_STEP is different for Zenith than Azimuth %s"),
            (all_row_step, "ROW_STEP is different for Zenith than Azimuth %s"),
            (all_step_unit, "Step units are different for Zenith than Azimuth %s")):
        if len(set(step_values)) != 1:
            raise MajaPluginSentinel2Exception(error_message % step_values)
    self.col_step = all_col_step[0]
    self.row_step = all_row_step[0]
    self.step_unit = all_step_unit[0]
def _extract_info(self, tile_angles_node, list_viewing_angles_node=None):
    """Collect mean incidence angles per band and the per-detector grids.

    For L2 products the node holding the mean angles and the node holding
    the per-detector viewing grids are not at the same level, hence the
    optional second argument; when omitted, both lookups run on
    ``tile_angles_node``.
    """
    if list_viewing_angles_node is None:
        list_viewing_angles_node = tile_angles_node

    mean_angle_nodes = tile_angles_node.xpath(MEAN_INCIDENCE_ANGLE)
    LOGGER.debug("incidence_angle_list_node %s", mean_angle_nodes)
    # One entry per band: zenith/azimuth of the mean incidence angle.
    self.incidence_angles_mean = {}
    for mean_node in mean_angle_nodes:
        self.incidence_angles_mean[get_only_value(mean_node, BAND_ID)] = {
            "incidence_zenith_angle": get_only_value(mean_node, ZENITH_ANGLE),
            "incidence_azimuth_angle": get_only_value(mean_node, AZIMUTH_ANGLE),
        }
    LOGGER.debug("incidence_angles %s", self.incidence_angles_mean)

    grid_nodes = list_viewing_angles_node.xpath(VIEWING_INCIDENCE_ANGLE_GRIDS_NODE)
    LOGGER.debug(grid_nodes)
    self.list_of_viewing_angles_grid = [
        MajaViewingIncidenceAnglesByBandAndDetector(grid_node)
        for grid_node in grid_nodes
    ]
    LOGGER.debug("self.list_of_viewing_angles_grid %s", self.list_of_viewing_angles_grid)
def get_list_of_mask(self, masktype):
    """Return the mask file names declared for each L1 band for ``masktype``.

    :param masktype: value of the MASK_FILENAME ``type`` attribute to select.
    :return: list of mask file names, one per band in LIST_OF_L1BAND_CHARID.

    Fix: the XPath lookup was previously evaluated twice per band (once for
    the result, once again for the log line); it is now resolved once.
    """
    masks = []
    for band in LIST_OF_L1BAND_CHARID:
        path = "//MASK_FILENAME[@type='" + masktype + "' and contains(.,'" + band + "')]"
        mask_filename = xml_tools.get_only_value(self.root, path).text
        masks.append(mask_filename)
        LOGGER.debug("Adding " + mask_filename + " to list of masks for type " + masktype)
    return masks
def _read(self, tile_angles_node, sun_angle_node_name):
    """
    Read the following information from the given xml node
    - mean of sun zenith and sun azimuth angle
    - the grid of values
    - the step in x and y
    """
    LOGGER.debug(tile_angles_node)
    sun_angles_mean_node = get_only_value(tile_angles_node, MEAN_SUN_ANGLES)
    LOGGER.debug(sun_angles_mean_node)
    self.sun_angles_mean = {
        "sun_zenith_angle": get_only_value(sun_angles_mean_node, ZENITH_ANGLE),
        "sun_azimuth_angle": get_only_value(sun_angles_mean_node, AZIMUTH_ANGLE),
    }
    LOGGER.debug("Sun angles : %s", self.sun_angles_mean)
    zenith_grid_xpath = sun_angle_node_name + "/" + GRID_ZENITH
    azimuth_grid_xpath = sun_angle_node_name + "/" + GRID_AZIMUTH
    self.zenith_angle_list_string_values = tile_angles_node.xpath(zenith_grid_xpath)
    self.azimuth_angle_list_string_values = tile_angles_node.xpath(azimuth_grid_xpath)
    grid_node = get_only_value(tile_angles_node, SUN_ANGLE_GRID_NODE_NAME, check=True)
    if grid_node is None:
        # No grid node: leave the step attributes untouched (guard clause).
        return
    all_col_step = grid_node.xpath(COL_STEP)
    all_row_step = grid_node.xpath(ROW_STEP)
    all_step_unit = grid_node.xpath(STEP_UNIT)
    LOGGER.debug("%s, %s", all_col_step, all_row_step)
    LOGGER.debug(set(all_row_step))
    # Zenith and azimuth grids must agree on a single step value and unit.
    for step_values, error_message in (
            (all_col_step, "COL_STEP is different for Zenith than Azimuth %s"),
            (all_row_step, "ROW_STEP is different for Zenith than Azimuth %s"),
            (all_step_unit, "Step units are different for Zenith than Azimuth %s")):
        if len(set(step_values)) != 1:
            raise MajaPluginSentinel2Exception(error_message % step_values)
    self.col_step = all_col_step[0]
    self.row_step = all_row_step[0]
    self.step_unit = all_step_unit[0]
def __init__(self, gipp_filename, validate_schema=False, schema_path=None):
    """Load a SMAC GIPP file and cache the band count and coefficient text."""
    super(GippSMACEarthExplorerXMLFileHandler, self).__init__(gipp_filename, validate_schema, schema_path)
    bands_node = get_only_value(self.root, SMAC_EARTH_EXPLORER_HANDLER_XPATH["NumberOfBands"])
    self.NumberOfBands = int(bands_node.text)
    coefficients_node = get_only_value(self.root, SMAC_EARTH_EXPLORER_HANDLER_XPATH["Coefficients"])
    self.Coefficients = coefficients_node.text
def __init__(self, gipp_filename, validate_schema=False, schema_path=None):
    """Load a CAMS GIPP file: model levels and acquisition date/time."""
    super(GippCAMSEarthExplorerXMLFileHandler, self).__init__(gipp_filename, validate_schema, schema_path)
    levels_text = xml_tools.get_only_value(
        self.root, CAMS_EARTH_EXPLORER_HANDLER_XPATH["ModelLevels"]).text
    # Same final value as the historical double assignment: every element
    # goes through an explicit float() conversion.
    self.model_levels = [float(level) for level in xml_tools.as_float_list(levels_text)]
    self.acquisition_date_time = xml_tools.get_only_value(
        self.root, CAMS_EARTH_EXPLORER_HANDLER_XPATH["AcquisitionDateTime"]).text
def _read(self, root, nss=None):
    """Read the xml file and extract angles.

    :param root: root node of the product xml document.
    :param nss: optional namespace mapping; accepted for interface
        compatibility but not used by the lookups in this method.

    Fix: replaced the mutable default argument ``nss={}`` with a ``None``
    sentinel (shared-dict anti-pattern); callers passing ``{}`` behave
    exactly as before.
    """
    if nss is None:
        nss = {}
    product_info_node = get_only_value(root, PRODUCT_INFO)
    LOGGER.debug("product_info_node %s", product_info_node)
    self.sun_angles = MajaSunAngles(product_info_node, SUN_ANGLE_NODE_NAME)
    list_of_viewing_angles_node = get_only_value(product_info_node, VIEWING_ANGLE_LIST)
    LOGGER.debug("list_of_viewing_angles_node %s", list_of_viewing_angles_node)
    self.viewing_incidence_angle = MajaViewingIncidenceAngles(product_info_node, list_of_viewing_angles_node)
def _read(self, root, nss=None):
    """Read the xml file and extract angles.

    :param root: root node of the tile xml document.
    :param nss: optional namespace mapping forwarded to the xpath lookups
        (defaults to no namespaces).

    Fix: replaced the mutable default argument ``nss={}`` with a ``None``
    sentinel (shared-dict anti-pattern); callers passing ``{}`` behave
    exactly as before.
    """
    if nss is None:
        nss = {}
    geometric_info_node = get_only_value(root, GEOMETRIC_INFO, nss)
    tile_angles_node = get_only_value(geometric_info_node, TILE_ANGLES, nss)
    LOGGER.debug(tile_angles_node)
    self.sun_angles = MajaSunAngles(tile_angles_node, SUN_ANGLE_NODE_NAME)
    self.viewing_incidence_angle = MajaViewingIncidenceAngles(tile_angles_node)
def write_private_sto_xmlhandler(self, sto_header_filename, p_ReferenceProductHeaderId, p_ReferenceProductInstance,
                                 p_dem, p_Validity_Start, p_Validity_Stop, p_Mission, p_root_template_directory,
                                 p_SchemaLocationDirectory, pCurrentDate, p_EnableCheckXMLFilesWithSchema=True):
    """Write the PRIVATE STO header file from its template.

    Loads the TEMPLATE_PDTANX_PRIVATE_STO_HDR template, fills the basic and
    ANX parameters plus the STO-specific nodes (no-data, subsampling factors,
    band stack), saves it as ``sto_header_filename`` and optionally validates
    the result against the schema.

    :raises MajaPluginBaseException: if the template file does not exist.
    """
    # Coarse product size comes from the DEM coarse-area description.
    l_CoarseProductImageSizeX = p_dem.CoarseArea.size[0]
    l_CoarseProductImageSizeY = p_dem.CoarseArea.size[1]
    resol = 0
    # L2-to-coarse subsampling ratio, rounded to the nearest integer.
    l_DL2L2CoarseRatio = float(p_dem.L2Areas[resol].size[0]) / float(p_dem.CoarseArea.size[0])
    l_L2L2CoarseRatio = int(l_DL2L2CoarseRatio + 0.5)
    # ---------------------------------------------------------------------------------------------------
    # Date PDV in UTC format
    # Get Validity
    lCreator_Version = self.plugin.Creator_Version
    lFile_Version = self.plugin.L2PRODUCT_FILE_VERSION
    lReferenceProductHeaderId = p_ReferenceProductHeaderId
    lReferenceProductInstance = p_ReferenceProductInstance
    # ---------------------------------------------------------------------------------------------------
    # Write STO
    LOGGER.debug("Write the PRIVATE STO header file ...")
    output_filename = sto_header_filename  # p_L2PrivateImageFilenamesProvider.GetSTOHeaderFileName()
    current_header_filename = os.path.join(p_root_template_directory, self.plugin.TEMPLATE_PDTANX_PRIVATE_STO_HDR)
    if not os.path.exists(current_header_filename):
        raise MajaPluginBaseException(
            "Internal error: the template file '" + current_header_filename + "' doesn't exist !!")
    # ---------------------------------------------------------------------------------------------------
    # Load the template file
    output_handler = EarthExplorerXMLFileHandler(current_header_filename)
    # ---------------------------------------------------------------------------------------------
    # Update basic parameters
    output_handler.update(p_Validity_Start, p_Validity_Stop, lFile_Version, output_filename, self.creator,
                          lCreator_Version, self.notes, self.system, self.outputfileclass, pCurrentDate,
                          lReferenceProductHeaderId)
    # ---------------------------------------------------------------------------------------------
    # Update basic ANX parameters
    output_handler.update_pdtanx(p_Mission, l_CoarseProductImageSizeX, l_CoarseProductImageSizeY,
                                 lReferenceProductInstance, self.productisvalid)
    # Update other parameters
    output_handler.set_string_value("//Nodata_Value", self.nodatavalue)
    output_handler.set_string_value("//Subsampling_Factor/By_Line", l_L2L2CoarseRatio)
    output_handler.set_string_value("//Subsampling_Factor/By_Column", l_L2L2CoarseRatio)
    output_handler.set_string_value("//Size/Bands", self.numberofstackimages)
    output_handler.set_list_of_bands(self.stolistofstringdates)  # , "//List_of_Bands", "Band")
    # The Band_Theoretical_Wavelength could be not exist (ex: VENUS)
    # Check the existence of the node in the template xml file before writing the value
    if xml_tools.get_only_value(output_handler.root, "//Band_Theoretical_Wavelength", check=True) is not None:
        output_handler.set_string_value(
            "//Band_Theoretical_Wavelength", self.correlbandtheoreticalwavelengthforstocomposite)
    # Save to file
    output_handler.save_to_file(output_filename)
    # Optional schema validation of the written header.
    if self.checkxmlfileswithschema and self.productisvalid and p_EnableCheckXMLFilesWithSchema:
        xml_tools.check_xml(output_filename, p_SchemaLocationDirectory)
def write_list_of_quality_indexes(self, listofquality):
    """Fill //List_of_Quality_Indexes with one Quality_Index node per item."""
    try:
        xnode = xml_tools.get_only_value(self.root, "//List_of_Quality_Indexes")
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" + "//List_of_Quality_Indexes" +
                                "' in the xml file! Details: ")
    xnode.set("count", str(len(listofquality)))
    for sequence_number, quality in enumerate(listofquality, start=1):
        quality_node = et.Element("Quality_Index")
        quality_node.set("sn", str(sequence_number))
        # Code
        code_node = et.Element("Code")
        code_node.text = quality.Code
        quality_node.append(code_node)
        # Value
        value_node = et.Element("Value")
        value_node.text = quality.Value
        quality_node.append(value_node)
        # Band code, only when present
        if quality.BandCode is not None:
            band_node = et.Element("Band_Code")
            band_node.text = quality.BandCode
            quality_node.append(band_node)
        xnode.append(quality_node)
def get_lut_indexes(self):
    """Return the LUT index entries as a {tag: text} dictionary."""
    lut_indexes_xml = xml_tools.get_only_value(
        self.root, LUT_EARTH_EXPLORER_HANDLER_XPATH["LUTIndexes"])
    return {index.tag: index.text for index in lut_indexes_xml.iterchildren()}
def get_string_value_of(self, key):
    """Return the text of the node at MUSCATE_HANDLER_XPATH[key]; raise if absent."""
    node = xml_tools.get_only_value(self.root, MUSCATE_HANDLER_XPATH[key])
    if node is None:
        raise MajaDataException("No " + MUSCATE_HANDLER_XPATH[key] +
                                " available in the file " + self.main_xml_file)
    return node.text
def _read(self, root, nss=None):
    """Read the product xml and index the mask file paths by type (and band).

    Fills ``self.masks_filepaths`` with paths joined onto
    ``self.product_directory``; a mask that carries a band id is stored one
    level deeper, keyed by that band id.

    Fix: replaced the mutable default argument ``nss={}`` with a ``None``
    sentinel (shared-dict anti-pattern); callers passing ``{}`` behave
    exactly as before.
    """
    if nss is None:
        nss = {}
    quality_info_node = get_only_value(root, QUALITY_INFO, namespaces=nss)
    masks_nodes = quality_info_node.xpath(MASKS)
    # LOGGER.debug(masks_nodes)
    for mask_element in masks_nodes:
        mask_type = get_only_value(mask_element, MASKS_TYPE)
        band_id = get_only_value(mask_element, BAND_ID)
        mask_filename = get_only_value(mask_element, "./text()")
        # LOGGER.debug("Mask type: %s \n band id %s, \n filename : %s", mask_type, band_id, mask_filename)
        if band_id is not None:
            # NOTE(review): assumes masks_filepaths[mask_type] already maps to
            # a dict for banded masks -- confirm against the initializer.
            self.masks_filepaths[mask_type][band_id] = os.path.join(self.product_directory, mask_filename)
        else:
            self.masks_filepaths[mask_type] = os.path.join(self.product_directory, mask_filename)
def __init__(self, gipp_filename):
    """Parse a GIPP site file, keeping every hit declared in GIPP_SITE_HANDLER_XPATH."""
    self.gipp_filename = gipp_filename
    self.root = get_root_xml(self.gipp_filename, deannotate=True)
    self.l2_site_values = {}
    for key, xpath in GIPP_SITE_HANDLER_XPATH.items():
        node = get_only_value(self.root, xpath)
        # Only keep xpaths that actually resolved to a node.
        if node is not None:
            self.l2_site_values[key] = node
def __init__(self, gipp_filename):
    """Parse a GIPP L2COMM file, keeping every hit declared in GIPP_COMM_HANDLER_XPATH."""
    self.gipp_filename = gipp_filename
    self.root = xml_tools.get_root_xml(self.gipp_filename, deannotate=True)
    self.l2_comm_values = {}
    for key, xpath in GIPP_COMM_HANDLER_XPATH.items():
        node = xml_tools.get_only_value(self.root, xpath, check=True)
        # check=True makes missing nodes come back as None instead of raising.
        if node is not None:
            self.l2_comm_values[key] = node
def get_size(self, res):
    """Return the tile size (NCOLS/NROWS) at resolution ``res`` as a PointXY."""
    path = "//Tile_Geocoding/Size[@resolution='" + str(res) + "']"
    # Read both dimensions from the header.
    ncols = int(xml_tools.get_only_value(self.root, path + "/NCOLS").text)
    nrows = int(xml_tools.get_only_value(self.root, path + "/NROWS").text)
    # A zero dimension means the header could not be read correctly.
    if ncols == 0 or nrows == 0:
        raise MajaDataException(
            "Impossible to read the NROWS and NCOLS coordinates for the Tile id '" +
            self.TileId + "' from the node Tile_Description/Size.")
    size_point = PointXY()
    size_point.x = ncols
    size_point.y = nrows
    return size_point
def get_mean_viewing_angles(self):
    """Return the mean viewing angles as (zenith, azimuth) in degrees.

    Falls back to (0, 0) when the Incidence_Angles node is absent.
    """
    zenith_xpath = "//Incidence_Angles/ZENITH_ANGLE[@unit='deg']"
    azimuth_xpath = "//Incidence_Angles/AZIMUTH_ANGLE[@unit='deg']"
    if xml_tools.get_only_value(self.root, zenith_xpath, check=True) is None:
        return 0, 0
    return (xml_tools.get_xml_float_value(self.root, zenith_xpath),
            xml_tools.get_xml_float_value(self.root, azimuth_xpath))
def get_geoposition_upperleftcorner(self, res):
    """Return the upper-left corner (ULX/ULY) at resolution ``res`` as a PointXY.

    The header stores doubles, but the decimal part corresponds to 0.01
    pixel, so the values are truncated to integers.
    """
    path = "//Tile_Geocoding/Geoposition[@resolution='" + str(res) + "']"
    ulx = int(xml_tools.get_only_value(self.root, path + "/ULX").text)
    uly = int(xml_tools.get_only_value(self.root, path + "/ULY").text)
    # A zero coordinate means the header could not be read correctly.
    if ulx == 0 or uly == 0:
        raise MajaDataException(
            "Impossible to read the ULX and ULY coordinates for the Tile id '" +
            self.TileId + "' from the node Tile_Description/Geoposition.")
    corner_point = PointXY()
    corner_point.x = ulx
    corner_point.y = uly
    return corner_point
def get_list_of_gipp_files(self):
    """
    GetList_of_GIPP_Files
    :return: one dict per GIPP_File node (index, location, nature, logical name)
    """
    gipp_files = []
    for gipp_node in xml_tools.get_all_values(self.root, GIPP_FILE):
        gipp_files.append({
            # NOTE(review): "index" stores the raw node (get_only_value) while
            # the other entries store strings -- confirm this is intentional.
            "index": xml_tools.get_only_value(gipp_node, GIPP_FILE_INDEX),
            "file_location": xml_tools.get_xml_string_value(gipp_node, GIPP_FILE_LOCATION),
            "nature": xml_tools.get_xml_string_value(gipp_node, GIPP_FILE_NATURE),
            "logical_name": xml_tools.get_xml_string_value(gipp_node, GIPP_FILE_LOGICAL_NAME),
        })
    return gipp_files
def get_geoposition_dims(self, res):
    """Return the geoposition pixel dimensions (XDIM/YDIM) at resolution ``res``.

    Fix: the error message previously referred to "ULX and ULY" (copy-paste
    from the upper-left-corner reader) although this method reads XDIM/YDIM.
    """
    path = "//Tile_Geocoding/Geoposition[@resolution='" + str(res) + "']"
    # Reads the dimensions
    xdim = int(xml_tools.get_only_value(self.root, path + "/XDIM").text)
    ydim = int(xml_tools.get_only_value(self.root, path + "/YDIM").text)
    # Checks if null
    if (xdim == 0) or (ydim == 0):
        raise MajaDataException(
            "Impossible to read the XDIM and YDIM coordinates for the Tile id '" +
            self.TileId + "' from the node Tile_Description/Geoposition.")
    # Set the point
    l_Point = PointXY()
    l_Point.x = xdim
    l_Point.y = ydim
    return l_Point
def initializelistofcamsfiledescription(self, list_of_cam_files):
    """Build ``self._list_of_cams_description_file`` from the given CAMS products.

    For each CAMS product the .HDR is parsed, the acquisition date is converted
    to a julian day, and the AOT/MR/RH DBL parts are located by name.

    :raises MajaIOError: if any of the three CAMS parts (AOT, MR, RH) is missing.

    Fix: the completeness check previously tested "aot_file" twice and never
    "rh_file", so a missing RH file went undetected.
    """
    self._list_of_cams_description_file = []
    # Loops on the cams list
    for cam in list_of_cam_files:
        # Normally, the product is uncompressed and the XML file checked in the PreProcessing method
        l_hdrcamsfilename = os.path.splitext(cam)[0] + ".HDR"
        l_camsfiledescription = {}
        # Load the EarthExplorer XML file
        l_handler = GippCAMSEarthExplorerXMLFileHandler(l_hdrcamsfilename)
        # Converts these dates in Julian day
        l_camsfiledescription["date_utc"] = l_handler.acquisition_date_time
        l_camsfiledescription["date_jd"] = date_utils.get_julianday_as_double(
            date_utils.get_datetime_from_utc(l_handler.acquisition_date_time))
        l_camsfiledescription["filename"] = l_hdrcamsfilename
        # Read the files to get the various CAMS parts
        l_list_of_date_filenames = l_handler.get_list_of_packaged_dbl_files(True, True)
        # For each file, search the type
        for f in l_list_of_date_filenames:
            l_filenamename = os.path.basename(f)
            # AOT File ?
            if "CAMS_AOT" in l_filenamename:
                l_camsfiledescription["aot_file"] = f
            # MR File ?
            if "CAMS_MR" in l_filenamename:
                l_camsfiledescription["mr_file"] = f
            # RH File ?
            if "CAMS_RH" in l_filenamename:
                l_camsfiledescription["rh_file"] = f
        # All three parts are mandatory.
        if "aot_file" not in l_camsfiledescription or "mr_file" not in l_camsfiledescription \
                or "rh_file" not in l_camsfiledescription:
            LOGGER.info(l_camsfiledescription)
            raise MajaIOError("Missing one CAMS file")
        # Optional: number of non-interpolable values, when declared in the header.
        if xml_tools.get_only_value(
                l_handler.root,
                "//Earth_Explorer_Header/Variable_Header/Specific_Product_Header/NumberOfNonInterpolableValues",
                check=True) is not None:
            l_camsfiledescription["NbNonInterpolate"] = xml_tools.get_xml_int_value(
                l_handler.root,
                "//Earth_Explorer_Header/Variable_Header/Specific_Product_Header/NumberOfNonInterpolableValues")
        LOGGER.debug(
            "Add CAMS file for date [" + l_camsfiledescription["date_utc"] + ";" +
            str(l_camsfiledescription["date_jd"]) + "] -> file " + l_camsfiledescription["filename"])
        l_camsfiledescription["model_levels"] = [str(f) for f in l_handler.model_levels]
        self._list_of_cams_description_file.append(l_camsfiledescription)
def generate_cla_image(self, realL1Nodata, working_dir):
    """Resolve the CLA (cloud altitude) image: use the product's grid when
    declared, otherwise build a constant image filled with the L1 no-data value."""
    LOGGER.debug("MuscateL1ImageFileReader::GenerateCLAMask()")
    # CLA image pipeline connection: test if a CLA image file is available
    cla_node = xml_tools.get_only_value(
        self._header_handler.root,
        "//Data_List/Data[Data_Properties/NATURE='Cloud_Altitude_Grid']/Data_File_List/DATA_FILE",
        check=True)
    if cla_node is None:
        # No grid declared in the product: generate a constant substitute.
        cla_filename = os.path.join(working_dir, "cla_constant_sub.tif")
        self._claimage = constant_image(
            self._dem.ALC, realL1Nodata, cla_filename, write_output=True).getoutput()["out"]
    else:
        self._claimage = self._header_handler.get_cla_image_filename()
def write_private_ltc_header_list_of_dbl(self, list_of_dbl):
    """Fill //DBL_Organization/List_of_Packaged_DBL_Files with the LTC DBL entries.

    Generated structure::

        <DBL_Organization>
          <List_of_Packaged_DBL_Files count="2">
            <Packaged_DBL_File sn="1">
              <Relative_File_Path>..._LTC.DBL.DIR/..._LTC.DBL.mha</Relative_File_Path>
              <File_Definition>VSC_LUTANX</File_Definition>
            </Packaged_DBL_File>
            ...
          </List_of_Packaged_DBL_Files>
        </DBL_Organization>
    """
    LOGGER.debug("Start writing the list of LTC DBL node ...")
    try:
        xnode = xml_tools.get_only_value(
            self.root, "//DBL_Organization/List_of_Packaged_DBL_Files")
    except BaseException:
        raise MajaDataException(
            "Error while reading the xml node '" + "//DBL_Organization/List_of_Packaged_DBL_Files" +
            "' in the xml file! Details: ")
    xnode.set("count", str(len(list_of_dbl)))
    for sequence_number, dbl_file in enumerate(list_of_dbl, start=1):
        packaged_node = et.SubElement(xnode, "Packaged_DBL_File")
        packaged_node.set("sn", str(sequence_number))
        # Relative file path, then file definition (same child order as before).
        et.SubElement(packaged_node, "Relative_File_Path").text = dbl_file.relativefilepath
        et.SubElement(packaged_node, "File_Definition").text = dbl_file.filedefinition
    LOGGER.debug("Writing the list of LTC DBL node DONE")
def write_list_of_gipp_files(self, listofgipps, write_nature_node=True, p_xpath="//List_of_GIPP_Files",
                             p_subxpath="GIPP_File"):
    """Fill the node at ``p_xpath`` with one ``p_subxpath`` element per GIPP.

    Generated structure::

        <List_of_GIPP_Files count="11">
          <GIPP_File sn="1">
            <Nature>GIP_CKEXTL</Nature>
            <File_Location>NIL=OTHER:EXTERNAL</File_Location>
            <Logical_Name>VE_TEST_GIP_CKEXTL_S_CAMARGUE_00001_00000000_99999999</Logical_Name>
          </GIPP_File>
    """
    try:
        xnode = xml_tools.get_only_value(self.root, p_xpath)
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" + p_xpath +
                                "' in the xml file! Details: ")
    xnode.set("count", str(len(listofgipps)))
    for sequence_number, gipp in enumerate(listofgipps, start=1):
        gipp_node = et.SubElement(xnode, p_subxpath)
        gipp_node.set("sn", str(sequence_number))
        # The Nature child is optional (controlled by write_nature_node).
        if write_nature_node:
            et.SubElement(gipp_node, "Nature").text = gipp.Nature
        et.SubElement(gipp_node, "File_Location").text = gipp.FileLocation
        et.SubElement(gipp_node, "Logical_Name").text = gipp.LogicalName
def write_list_of_annex_files(self, listofannex):
    """Fill //Product_Organization/List_of_Annex_Files with one Annex_File per item.

    Generated structure::

        <List_of_Annex_Files count="6">
          <Annex_File sk="CLD">
            <Nature>VSC_PDTANX</Nature>
            <File_Location>./..._CLD.HDR</File_Location>
            <Logical_Name>..._CLD</Logical_Name>
          </Annex_File>
    """
    try:
        xnode = xml_tools.get_only_value(
            self.root, "//Product_Organization/List_of_Annex_Files")
    except BaseException:
        raise MajaDataException(
            "Error while reading the xml node '" + "//Product_Organization/List_of_Annex_Files" +
            "' in the xml file! Details: ")
    xnode.set("count", str(len(listofannex)))
    for annex in listofannex:
        annex_node = et.SubElement(xnode, "Annex_File")
        # Annex files are keyed by their "sk" code rather than a sequence number.
        annex_node.set("sk", annex.sk)
        et.SubElement(annex_node, "Nature").text = annex.Nature
        et.SubElement(annex_node, "File_Location").text = annex.FileLocation
        et.SubElement(annex_node, "Logical_Name").text = annex.LogicalName
def __init__(self, granule_xml_file):
    """Constructor: parse a Sentinel-2 L1 granule (tile) xml file.

    Reads the tile angles, sensing time, datastrip id and reference system,
    then resolves the quality-mask file lists (detector footprint, saturated,
    defective and no-data pixels), handling both PSD < 14 and PSD >= 14 mask
    path layouts.
    """
    super(MajaSentinel2L1GranuleXmlReader, self).__init__(granule_xml_file)
    self.granule_xml_file = granule_xml_file
    self.TileId = ""
    # Sun / viewing angles read from the tile xml root.
    self.angles = MajaL1Angles(self.root)
    self.sensing_time = xml_tools.get_only_value(
        self.root, XPATH_TILE_S2.get("sensing_time")).text
    self.datastrip_id = xml_tools.get_only_value(
        self.root, XPATH_TILE_S2.get("datastrip_id")).text
    self.reference_system_code = xml_tools.get_only_value(
        self.root, XPATH_TILE_S2.get("reference_code")).text
    self.reference_system_name = xml_tools.get_only_value(
        self.root, XPATH_TILE_S2.get("horizontal_cs_name")).text
    # *******************************************************************************
    # ** Set the list of Detector foot print filename
    # *******************************************************************************
    # TEMPORARY
    l_DetList = self.get_list_of_mask("MSK_DETFOO")
    # ---------------------------------------------------------------------------------------------
    # PSD version detection:
    # Diff PSD = 14 and PSD >= 14
    # ATTENTION : for PSD 14 product, the SchemaLocation value is always PSD 12
    # PSD < 14 : <MASK_FILENAME type="MSK_DEFECT">S2A_OPER_MSK_DEFECT_MPS__20150428T084140_A000065_T15SVD_B01_MSIL1C.gml</MASK_FILENAME>
    # PSD < 14 : <MASK_FILENAME bandId="0" type="MSK_DEFECT">S2A_OPER_MSK_DEFECT_MPS__20160914T113643_A006424_T36JTT_B01_MSIL1C.gml</MASK_FILENAME>
    # PSD >= 14 :<MASK_FILENAME bandId="0" type="MSK_DEFECT">GRANULE/L1C_T36JTT_A006424_20160914T081456/QI_DATA/MSK_DEFECT_B01.gml</MASK_FILENAME>
    # -> Automatic detection: find GRANULE/ in the path. If it is present, PSD >= 14
    # PSD >= 14 : PATH from ROOT
    l_FirstFilename = l_DetList[0]
    l_PsdNewVersion = True
    # If not found, PSD < 14
    if not "GRANULE/" in l_FirstFilename:
        l_PsdNewVersion = False
        LOGGER.debug("The Sentinel2 XML Tile <" + granule_xml_file +
                     "> has been detected OLD PSD version (PSD<14).")
    # Old layout: mask files live next to the tile xml, under QI_DATA.
    productPath = os.path.join(os.path.dirname(granule_xml_file), "QI_DATA")
    if l_PsdNewVersion:
        # return to the ROOT path: new-layout mask paths are relative to the product root
        productPath = os.path.join(os.path.dirname(granule_xml_file), ".." + os.path.sep + "..")
    LOGGER.debug("productPath : " + productPath)
    # Resolve each mask family to absolute paths under productPath.
    self.ListOfDetFootPrintHeaderFileName = [
        os.path.join(productPath, m) for m in self.get_list_of_mask("MSK_DETFOO")
    ]
    self.ListOfSaturatedPixelsHeaderFileName = [
        os.path.join(productPath, m) for m in self.get_list_of_mask("MSK_SATURA")
    ]
    self.ListOfDefectivPixelsHeaderFileName = [
        os.path.join(productPath, m) for m in self.get_list_of_mask("MSK_DEFECT")
    ]
    self.ListOfNoDataPixelsHeaderFileName = [
        os.path.join(productPath, m) for m in self.get_list_of_mask("MSK_NODATA")
    ]
def write_private_ltc_header(self, list_of_ltc):
    """Fill the //List_of_LTC node with one <LTC> element per entry.

    Each LTC entry carries its date plus the image-center solar and viewing
    angles::

        <LTC sn="1">
          <Date>UTC=2012-01-01T10:51:48</Date>
          <Solar_Angles><Image_Center>
            <Azimuth unit="deg">166.636131956</Azimuth>
            <Zenith unit="deg">67.602640760</Zenith>
          </Image_Center></Solar_Angles>
          <Viewing_Angles>...</Viewing_Angles>
        </LTC>

    Fixes: the closing log message said "DONES" instead of "DONE", and the
    solar/viewing angle subtrees were built by duplicated code (now one
    shared helper).
    """

    def _angles_subtree(tag, angle):
        # Build <tag><Image_Center><Azimuth unit="deg"/><Zenith unit="deg"/></Image_Center></tag>
        angles_node = et.Element(tag)
        image_center = et.Element("Image_Center")
        azimuth = et.Element("Azimuth")
        azimuth.set("unit", "deg")
        azimuth.text = angle.azimuth
        zenith = et.Element("Zenith")
        zenith.set("unit", "deg")
        zenith.text = angle.zenith
        image_center.append(azimuth)
        image_center.append(zenith)
        angles_node.append(image_center)
        return angles_node

    LOGGER.debug("Start writing the list of LTC node ...")
    try:
        xnode = xml_tools.get_only_value(self.root, "//List_of_LTC")
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" + "//List_of_LTC" +
                                "' in the xml file! Details: ")
    l_count = len(list_of_ltc)
    xnode.set("count", str(l_count))
    for i in range(l_count):
        f = list_of_ltc[i]
        node = et.Element("LTC")
        node.set("sn", str(i + 1))
        # Date
        node2 = et.Element("Date")
        node2.text = f.Date
        node.append(node2)
        # Solar then viewing angles (same order as before)
        node.append(_angles_subtree("Solar_Angles", f.solarangle))
        node.append(_angles_subtree("Viewing_Angles", f.viewingangle))
        # Insert to doc
        xnode.append(node)
    LOGGER.debug("Writing the list of LTC node DONE")
def initialize(self, filename, working_dir, has_snow):
    """Initialize the DEM file names and derived data from an AUX_REFDE2 product.

    :param filename: path to the AUX_REFDE2 product; the .HDR/.DBL/.DBL.DIR
        sibling paths are derived from it.
    :param working_dir: directory where the rescaled (multiplied) rasters are
        written.
    :param has_snow: accepted for interface compatibility; not referenced in
        the body of this method.

    Side effects: fills the ALC/MSK/ASC/SLC file attributes, the per-resolution
    SLP/ALT/ASP lists, the coarse and L2 area descriptions, the projection
    code/type, the site nickname, and the ALT statistics.
    """
    file_hdr = os.path.splitext(filename)[0] + ".HDR"
    file_dbl = os.path.splitext(filename)[0] + ".DBL"
    file_dbldir = os.path.splitext(filename)[0] + ".DBL.DIR"
    LOGGER.info("AUX_REFDE2 filename: " + filename)
    # uncompress dbl
    uncompress_dbl_product(file_dbl)
    # DEM filenames provider: count the resolutions from the _ALT*.TIF files
    list_of_file = os.listdir(file_dbldir)
    nbresol = 0
    for f in list_of_file:
        if "_ALT" in f and "TIF" in os.path.splitext(f)[1]:
            nbresol = nbresol + 1
    LOGGER.info("Nb resolution found " + str(nbresol))
    self.initialize_res_list(nbresol)
    LOGGER.info(
        "DEMFilenamesProvider::Initialize. Nb resolution computed:" + str(len(self._resList)))
    for resol in self._resList:
        LOGGER.debug("DEMFilenamesProvider::Initialize. Prefix resol : " + resol)
    handler = EarthExplorerXMLFileHandler(file_hdr)
    list_of_dbl_files = handler.get_list_of_packaged_dbl_files(True, False)
    LOGGER.info("DEMFileNames found " + str(len(list_of_dbl_files)) + " files")
    # NOTE(review): rejects any packaged file whose name does not end in
    # '.TIF' (split('.TIF')[-1] is then non-empty) -- confirm intended.
    for i in range(0, len(list_of_dbl_files)):
        if list_of_dbl_files[i].split('.TIF')[-1]:
            raise MajaDataException(
                "Wrong file extension detected. Delete the file: " + str(list_of_dbl_files[i]))
    # --------------------------------------
    # Find the correct filename
    for fi in list_of_dbl_files:
        # LAIG - FA - MAC - 1610 - CNES
        # ../CONTEXTES_ANOMALIES/TMA_VENUS_maccs_errors/4398/VE_TEST_AUX_REFDE2_BRASCHAT_0001.DBL.DIR/VE_TEST_AUX_REFDE2_BRASCHAT_0001_SLP.TIF
        # Extract the last value -> SLP
        # ../CONTEXTES_ANOMALIES/TMA_VENUS_maccs_errors/4398/VE_TEST_AUX_REFDE2_BRASCHAT_0001.DBL.DIR/VE_TEST_AUX_REFDE2_BRASCHAT_0001_ALT_R1.TIF
        # Extract the last value -> ALT
        l_splitted = (os.path.splitext(os.path.basename(fi))[0]).split("_")
        l_lenghtlistfilenamename = len(l_splitted)
        # Extract the two last values -> ex: 0001_SLP or ALT_R1
        l_keytype = l_splitted[-1]
        if l_lenghtlistfilenamename > 2:
            l_keytype = l_splitted[-2] + "_" + l_keytype
        # --------------------------------------
        # Test if the filename is ALC
        if "ALC" in l_keytype:
            self.ALC = fi
        # --------------------------------------
        # Test if the filename is MSK
        elif "MSK" in l_keytype:
            self.MSK = fi
        # --------------------------------------
        # Test if the filename is ASC
        elif "ASC" in l_keytype:
            self.ASC = fi
        # --------------------------------------
        # Test if the filename is SLC
        elif "SLC" in l_keytype:
            self.__SLCInternal = fi
        else:
            # --------------------------------------
            # Loop under resolutions: per-resolution SLP/ALT/ASP rasters
            for res in self._resList:
                # --------------------------------------
                # Test if the filename is SLP
                if "SLP" in l_keytype:
                    if res in l_keytype:
                        self.__SLPListInternal.append(fi)
                # --------------------------------------
                # Test if the filename is ALT
                elif "ALT" in l_keytype:
                    if res in l_keytype:
                        self.ALTList.append(fi)
                # --------------------------------------
                # Test if the filename is ASP
                elif "ASP" in l_keytype:
                    if res in l_keytype:
                        self.__ASPListInternal.append(fi)
                else:
                    LOGGER.debug("Unknown Filename and associated product type.")
    # endloop resol
    # --------------------------------------
    # Check existence of ALC filename
    if not os.path.exists(self.ALC):
        raise MajaDataException("The ALC file '" + self.ALC + "' of the DTM doesn't exist !")
    # --------------------------------------
    # Check existence of MSK filename
    if not os.path.exists(self.MSK):
        raise MajaDataException("The MSK file '" + self.MSK + "' of the DTM doesn't exist !")
    # --------------------------------------
    # Check existence of SLC filename; when present, rescale it by self._coeff
    if not os.path.exists(self.__SLCInternal):
        raise MajaDataException("The SLC file '" + self.__SLCInternal + "' of the DTM doesn't exist !")
    else:
        LOGGER.debug("Starting multiply " + self.__SLCInternal + " * " + str(self._coeff))
        self.SLC = os.path.join(working_dir, "Mul_" + os.path.basename(self.__SLCInternal))
        self._apps.add_otb_app(
            multiply_by_scalar(self.__SLCInternal, self._coeff, output_image=self.SLC))
        # The coarse-resolution footprint is taken from the SLC raster.
        mtdat = GdalDatasetInfo(self.__SLCInternal)
        self.CoarseArea = Area()
        self.CoarseArea.size = mtdat.size
        self.CoarseArea.origin = mtdat.origin
        self.CoarseArea.spacing = mtdat.pixel_size
        LOGGER.debug("Done")
    # --------------------------------------
    for resol in range(0, len(self._resList)):
        # --------------------------------------
        # Check existence of SLP filename; when present, rescale it by self._coeff
        if not os.path.exists(self.__SLPListInternal[resol]):
            raise MajaDataException(
                "One of the SLP file '" + self.__SLPListInternal[resol] + "' of the DTM doesn't exist !")
        else:
            LOGGER.debug("Starting multiply " + self.__SLPListInternal[resol] + " * " + str(self._coeff))
            tmp = os.path.join(working_dir, "Mul_" + os.path.basename(self.__SLPListInternal[resol]))
            slp_mul_app = multiply_by_scalar(
                self.__SLPListInternal[resol], self._coeff, output_image=tmp, write_output=False)
            self._apps.add_otb_app(slp_mul_app)
            # The L2 footprint at this resolution is taken from the SLP raster.
            mtdat = GdalDatasetInfo(self.__SLPListInternal[resol])
            l2area = Area()
            l2area.size = mtdat.size
            l2area.origin = mtdat.origin
            l2area.spacing = mtdat.pixel_size
            self.ProjRef = mtdat.dataset.GetProjectionRef()
            self.L2Areas.append(l2area)
            LOGGER.debug("Done")
            self.SLPList.append(slp_mul_app.getoutput().get("out"))
        # --------------------------------------
        # Check existence of ALT filename
        if not os.path.exists(self.ALTList[resol]):
            raise MajaDataException(
                "One of the ALT file '" + self.ALTList[resol] + "' of the DTM doesn't exist !")
        # --------------------------------------
        # Check existence of ASP filename; when present, rescale it by self._coeff
        if not os.path.exists(self.__ASPListInternal[resol]):
            raise MajaDataException(
                "One of the ASP file '" + self.__ASPListInternal[resol] + "' of the DTM doesn't exist !")
        else:
            LOGGER.debug("Starting multiply " + self.__ASPListInternal[resol] + " * " + str(self._coeff))
            tmp = os.path.join(working_dir, "Mul_" + os.path.basename(self.__ASPListInternal[resol]))
            asp_mul_app = multiply_by_scalar(
                self.__ASPListInternal[resol], self._coeff, output_image=tmp, write_output=False)
            self._apps.add_otb_app(asp_mul_app)
            LOGGER.debug("Done")
            self.ASPList.append(asp_mul_app.getoutput().get("out"))
    # end loop resol
    LOGGER.debug(nbresol)
    # Projection: prefer the cartographic CRS code, fall back on the geographic one.
    l_cartoCode = xml_tools.get_only_value(
        handler.root,
        "//DEM_Information/Cartographic/Coordinate_Reference_System/Code",
        namespaces=handler.nss, check=True)
    l_geoCode = xml_tools.get_only_value(
        handler.root,
        "//DEM_Information/Geographic/Coordinate_Reference_System/Code",
        namespaces=handler.nss, check=True)
    if l_cartoCode is not None:
        self.ProjCode = l_cartoCode.text
        self.ProjType = "PROJECTED"
    elif l_geoCode is not None:
        self.ProjCode = l_geoCode.text
        self.ProjType = "GEOGRAPHIC"
    else:
        raise MajaDataException("Unknown DEM type")
    LOGGER.debug("DEM Projection Code: " + self.ProjCode)
    LOGGER.debug("DEM Projection Type: " + self.ProjType)
    self.Site = xml_tools.get_xml_string_value(
        handler.root,
        "//Specific_Product_Header/Instance_Id/Applicable_Site_Nick_Name",
        namespaces=handler.nss)
    if nbresol != 0:
        # NOTE(review): param_stats appears unused -- stats() is called with
        # the image path directly below.
        param_stats = {"im": self.ALTList[0]}
        # Statistics of the first-resolution altitude raster.
        stat_app = stats(self.ALTList[0])
        self.ALT_Mean = stat_app.getoutput().get("mean")
        self.ALT_Max = stat_app.getoutput().get("max")
        self.ALT_Min = stat_app.getoutput().get("min")
        self.ALT_Stdv = stat_app.getoutput().get("stdv")
        self.ALT_LogicalName = "LOCAL=" + os.path.splitext(os.path.basename(file_hdr))[0]
        LOGGER.info("DEM Mean : " + str(self.ALT_Mean))
        LOGGER.info("DEM Max : " + str(self.ALT_Max))
        LOGGER.info("DEM Min : " + str(self.ALT_Min))
        LOGGER.info("DEM Stdv : " + str(self.ALT_Stdv))
def get_string_value(self, xpath):
    """Return the text content of the single node matched by ``xpath``."""
    node = xml_tools.get_only_value(self.root, xpath, namespaces=self.nss)
    return node.text
def get_string_value_of(self, key):
    """Return the text of the node addressed by EARTH_EXPLORER_HANDLER_XPATH[key]."""
    xpath = EARTH_EXPLORER_HANDLER_XPATH[key]
    return xml_tools.get_only_value(self.root, xpath, namespaces=self.nss).text