def get_gipp_filename(dir_name, file_type):
    list_of_gipp = get_list_of_gipp_filenames(dir_name, file_type)
    if len(list_of_gipp) > 1:
        raise MajaDataException("More than one GIPP of type " + file_type +
                                " has been detected in the input directory")
    if len(list_of_gipp) < 1:
        raise MajaDataException("No GIPP of type " + file_type +
                                " has been detected in the input directory!")
    return list_of_gipp[0]
def get_gipp_filename_with_mission(dir_name, file_type, mission):
    list_of_gipp = get_list_of_gipp_filenames_with_mission(dir_name, file_type, mission)
    if len(list_of_gipp) > 1:
        raise MajaDataException("More than one GIPP of type " + file_type +
                                " has been detected for the Mission <" + mission +
                                "> in the input directory!")
    if len(list_of_gipp) < 1:
        raise MajaDataException("No GIPP of type " + file_type +
                                " has been detected for the Mission <" + mission +
                                "> in the input directory!")
    return list_of_gipp[0]
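# Hedged usage sketch (not part of the original module): how the two helpers above could be
# called. The directory path and the "GIP_L2COMM" file type below are illustrative assumptions;
# the GIPP type string follows the same convention as the "GIP_L2TOCR" type used further down.
#
#   l2comm_file = get_gipp_filename("/path/to/input_dir", "GIP_L2COMM")
#   tocr_file = get_gipp_filename_with_mission("/path/to/input_dir", "GIP_L2TOCR", "SENTINEL2_")
#
# Both helpers raise MajaDataException when zero or more than one matching GIPP is found.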
def get_tocr_for_model(self, model):
    if model in self.GIP_L2TOCR_LookUpTableConverter:
        return self.GIP_L2TOCR_LookUpTableConverter[model].new_gipp_filename
    else:
        raise MajaDataException("No " + model + " in list of available TOCR luts")
def write_list_of_quality_indexes(self, listofquality):
    try:
        xnode = xml_tools.get_only_value(self.root, "//List_of_Quality_Indexes")
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" +
                                "//List_of_Quality_Indexes" + "' in the xml file!")
    l_count = len(listofquality)
    xnode.set("count", str(l_count))
    for i in range(l_count):
        f = listofquality[i]
        node = et.Element("Quality_Index")
        node.set("sn", str(i + 1))
        # Code
        node1 = et.Element("Code")
        node1.text = f.Code
        node.append(node1)
        # Value
        node2 = et.Element("Value")
        node2.text = f.Value
        node.append(node2)
        # Band code, if any
        if f.BandCode is not None:
            node3 = et.Element("Band_Code")
            node3.text = f.BandCode
            node.append(node3)
        xnode.append(node)
def generate_toa_caching(self, listOfTOAImageFileNames, reflectanceMultiplicationValues, working):
    """
    :param listOfTOAImageFileNames: ListOfStrings
    :param reflectanceMultiplicationValues: ListOfDoubles
    :return:
    """
    if len(reflectanceMultiplicationValues) != len(listOfTOAImageFileNames):
        raise MajaDataException("Not the same number of band images and coefficients")
    # Get the number of bands from the number of TOA image files set in the input product directory
    l_NbBand = len(listOfTOAImageFileNames)  # int
    # Convert the input jp2 images to tif
    for i in range(l_NbBand):
        toaFilename = listOfTOAImageFileNames[i]
        LOGGER.debug("Caching the <%s> image filename...", toaFilename)
        LOGGER.debug("Reflectance quantification value: %s", reflectanceMultiplicationValues[i])
        # Initialize the TOA reader
        l_ImageFilename = os.path.join(working, "toaconverted_" + str(i) + ".tif")
        # extract_roi(toaFilename, [0], l_ImageFilename, working)
        app = multiply_by_scalar(toaFilename,
                                 reflectanceMultiplicationValues[i],
                                 output_image=l_ImageFilename,
                                 write_output=True)
        self._pipeline.add_otb_app(app)
        self._toa_scalar_list.append(app.getoutput()["out"])
def get_filename(self, directory, prefix, file_semantic, postfix, extention, check_file_exists):
    """
    const std::string & directory, const std::string & prefix, const std::string & file_semantic,
    const std::string & postfix, const std::string & extention, const bool check_file_exists
    TODO: see to remove this part from build_filenames (two different features in one function)
    :param directory:
    :param prefix:
    :param file_semantic:
    :param postfix:
    :param extention:
    :param check_file_exists:
    :return:
    """
    # Build the relative filename
    l_ImageFileName = prefix + "_" + file_semantic + "_" + postfix + extention
    # Build the absolute filename
    filename = os.path.join(directory, l_ImageFileName)
    # Check that the file exists
    if check_file_exists and not os.path.exists(filename):
        raise MajaDataException(
            "L2ImageFilenames Provider: The L2 '{}' filename {} doesn't exist.\n"
            "The <{}> product is NOT valid!".format(file_semantic, filename, directory))
    return filename
def get_band_index_from_list_datedm1_corresponding_with_band_code_date_d(self, band_code, bandwavedm1):
    # Theoretical wavelength of the band for the current date D
    l_theoreticalwavelength_dated = self.ListOfBandsTheoreticalWavelength.get(band_code)
    # Number of Band(TheoreticalWavelength) entries in the L2 input composite image
    l_nblistofbandtheoreticalwavelengthdatedm1 = len(bandwavedm1)
    # Find the index in the date D-1 list
    for i in range(l_nblistofbandtheoreticalwavelengthdatedm1):
        if int(bandwavedm1[i]) == int(l_theoreticalwavelength_dated):
            return i
    # If not found, raise an exception
    msg = ("Error while checking the corresponding band from the current date D to the previous date D-1.\n"
           "The band code '" + band_code + "' (with the theoretical wavelength '" +
           str(l_theoreticalwavelength_dated) + "') is not found in the composite of the L2 input product. "
           "The theoretical wavelengths of the L2 input composite are (" +
           str(l_nblistofbandtheoreticalwavelengthdatedm1) + "): ")
    for band in bandwavedm1:
        msg = msg + str(band) + " "
    raise MajaDataException(msg)
def provide_filename(directory, basefilename, extentionfilename):
    """
    :param directory:
    :param basefilename:
    :param extentionfilename:
    :return:
    """
    # Set the maximum number of candidate filenames
    nbmaxfilenames = 10000
    cptfilenames = 0
    provide = False
    filename = ""
    # Loop until a free filename is found
    while not provide and cptfilenames < nbmaxfilenames:
        filename = os.path.join(directory, "{}{}.{}".format(basefilename, cptfilenames, extentionfilename))
        # Check whether the filename already exists
        if not os.path.exists(filename):
            provide = True
        cptfilenames += 1
    # 'nbmaxfilenames' files already exist: the user must clean the working directory
    if not provide:
        raise MajaDataException(
            "Utilities::ProvideFilename: Impossible to find an available filename in "
            "the {} caching directory! The maximum number of filenames is {}. Clean the "
            "directory.".format(directory, nbmaxfilenames))
    return filename
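# Hedged usage sketch (illustrative, not from the original code): provide_filename() appends an
# increasing counter to the base name until it finds a path that does not exist yet. With a
# hypothetical empty caching directory:
#
#   tmp_name = provide_filename("/tmp/maja_cache", "toaconverted_", "tif")
#   # -> "/tmp/maja_cache/toaconverted_0.tif" on the first call,
#   #    "/tmp/maja_cache/toaconverted_1.tif" once that file has been created, and so on.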
def get_value(self, key, check=False):
    if key in self.l2_comm_values:
        return self.l2_comm_values[key]
    if check:
        return None
    raise MajaDataException("No " + key + " in the GIPP L2COMM dictionary")
def get_string_value_of(self, key):
    value = xml_tools.get_only_value(self.root, MUSCATE_HANDLER_XPATH[key])
    if value is not None:
        return value.text
    else:
        raise MajaDataException("No " + MUSCATE_HANDLER_XPATH[key] +
                                " available in the file " + self.main_xml_file)
def fully_resolve(a_path, check_existence=False):
    resolved = os.path.expanduser(os.path.expandvars(os.path.normpath(a_path)))
    if "$" in resolved:
        raise MajaProcessingError("Environment variable not resolved in %s" % resolved)
    if check_existence and not os.path.exists(resolved):
        raise MajaDataException("File not found %s" % resolved)
    return resolved
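# Hedged usage sketch (illustrative path and environment variable, not from the original code):
# fully_resolve() normalizes the path, then expands environment variables and "~". It raises
# MajaProcessingError if a "$VAR" is left unresolved and, when check_existence=True,
# MajaDataException if the resolved path does not exist.
#
#   # assuming MAJA_INPUT is set to /data/maja in the environment
#   resolved = fully_resolve("$MAJA_INPUT/./L1C")
#   # -> "/data/maja/L1C"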
def get_input_l2_image_product(dir, app_handler, enable_public_data, dem):
    LOGGER.info("Searching for L2 product in " + dir)
    list_of_product = get_list_of_input_l2_image_product_sorted(dir, app_handler, enable_public_data, dem)
    if len(list_of_product) > 1:
        raise MajaDataException("There is more than one L2 image product in the input data.")
    return list_of_product[0]
def get_input_l1_image_product(dir, tile_id=None):
    LOGGER.info("Searching for L1 product in " + dir)
    list_of_product = get_list_of_input_l1_image_product_sorted(dir, tile_id=tile_id)
    if len(list_of_product) > 1:
        raise MajaDataException("There is more than one L1 image product in the input data.")
    return list_of_product[0]
def get_value(self, key, check=False):
    if key in self.l2_site_values:
        return self.l2_site_values[key]
    if check:
        return None
    raise MajaDataException("No " + key + " in the L2SITE dictionary")
def get_user_conf_camera_filename(self, plugin_name):
    conf_file = self._userconf_directory + os.path.sep + "MAJAUserConfig_" + plugin_name + ".xml"
    if not os.path.exists(conf_file):
        raise MajaDataException("The user configuration file <" + conf_file +
                                "> (for the plugin <" + plugin_name + ">) doesn't exist!")
    userconfigcamerafilename = file_utils.copy_file_to_directory(conf_file, self._workingDirectory,
                                                                 notestmode=True)
    if self._stylesheet is not None:
        translate_xsl(userconfigcamerafilename, self._stylesheet)
    return userconfigcamerafilename
def get_aot_method(str_name):
    if str_name == "MULTITEMPORAL":
        method = AOTEstimation.MULTITEMPORAL
    elif str_name == "MULTISPECTRAL":
        method = AOTEstimation.MULTISPECTRAL
    elif str_name == "SPECTROTEMPORAL":
        method = AOTEstimation.SPECTROTEMPORAL
    else:
        raise MajaDataException("Utilities:GetAOTMethod: The AOT method " + str_name + " does not exist.")
    return method
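# Hedged usage sketch (illustrative): the AOT method string typically comes from the GIPP
# parameters; any unknown value raises MajaDataException.
#
#   method = get_aot_method("MULTITEMPORAL")
#   assert method == AOTEstimation.MULTITEMPORAL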
def get_dircorr_method(str_name):
    if str_name == "Roy":
        method = DirectionalCorrection.ROY
    elif str_name == "None":
        method = DirectionalCorrection.DEACTIVATED
    elif str_name == "Lut":
        method = DirectionalCorrection.LUT
    else:
        raise MajaDataException(
            "Utilities:GetDirCorrMethod: The Directional Correction model " + str_name + " does not exist.")
    return method
def uncompress_file(src, dst):
    command_line = "tar -xjf " + src + " -C " + dst
    LOGGER.info("Uncompress %s", command_line)
    # TODO: TBC use append env or manage entire env for each COTS?
    status = launch_command(command_line)
    # TODO: TBC move status to post?
    # TODO: see status management by system command executor
    if status != 0:
        raise MajaDataException("Error running {}. Exit code {}".format(command_line, status))
def get_cams_status_string(status):
    if status == CAMSStatus.ACTIVATED_OK:
        str_status = "Activated_OK"
    elif status == CAMSStatus.ACTIVATED_NOAVAILABLEDATA:
        str_status = "Activated_NoAvailableData"
    elif status == CAMSStatus.ACTIVATED_PARTIALLYAVAILABLEDATA:
        str_status = "Activated_PartiallyAvailableData"
    elif status == CAMSStatus.DEACTIVATED:
        str_status = "Deactivated"
    else:
        raise MajaDataException("The CAMS status " + str(status) + " does not exist.")
    return str_status
def __init__(self, filename):
    self.ListOfBandsTheoreticalWavelength = {}
    self.conf = None
    try:
        self.conf = camera_admin_conf.parse(filename, True)
        LOGGER.debug("Start reading the 'List_of_Band_Theoretical_Wavelength' 'count' attribute...")
        # Get the declared number of theoretical wavelengths
        l_Count = self.conf.get_Composite().get_List_of_Band_Theoretical_Wavelength().get_count()
        # Loop over the sk nodes and read the value of each node
        list_of_bands = self.conf.get_Composite().get_List_of_Band_Theoretical_Wavelength(
        ).get_Band_Theoretical_Wavelength()
        l_nbvalues = len(list_of_bands)
        LOGGER.debug("Nb Sk values: " + str(l_nbvalues))
        # Check that the 'count' attribute matches the number of nodes
        if l_Count != l_nbvalues:
            raise MajaDataException(
                "In the Configuration file <" + filename + ">, the 'count' attribute value (" +
                str(l_Count) + ") is different from the number of sub-nodes found (" +
                str(l_nbvalues) + ")!")
        # Get the list of theoretical wavelengths defined in the configuration file
        for wave in list_of_bands:
            self.ListOfBandsTheoreticalWavelength[wave.get_sk()] = wave.get_valueOf_()
            LOGGER.debug("Adding " + wave.get_sk() + " = " + str(wave.get_valueOf_()))
        LOGGER.debug("Load the ConfigAdminCamera file done.")
    except MajaDataException:
        raise
    except Exception as e:
        raise MajaDataException(
            "Cannot create ConfigAdminCamera instance with the following input filename <" +
            filename + ">!: " + str(e))
def get_list_of_packaged_dbl_files(self, absolute, check):
    nodes = xml_tools.get_all_values(self.root, EARTH_EXPLORER_HANDLER_XPATH["DBLFilenames"])
    list_of_files = [nd.text for nd in nodes]
    if absolute:
        l_rootbasepath = os.path.dirname(self.main_xml_file)
        list_of_files = [os.path.join(l_rootbasepath, nd.text) for nd in nodes]
    if check:
        for fi in list_of_files:
            if not os.path.exists(fi):
                raise MajaDataException("File " + fi + " does not exist")
    return list_of_files
def get_list_of_input_l1_image_product_sorted(dir, tile_id=None):
    list_of_product = L1ImageInformationsProvider.get_list_of_l1products(dir, tile_id=tile_id)
    LOGGER.info("Nb L1 products found: " + str(len(list_of_product)))
    if len(list_of_product) == 0:
        raise MajaDataException("No L1 product found")
    dictofproduct = {}
    for pr in list_of_product:
        l1product = L1ImageInformationsProvider.create(pr, tile_id=tile_id)
        l1jdate = get_julianday_as_double(l1product.ProductDate)
        dictofproduct[l1jdate] = l1product
    # Sort the products by ascending julian date
    dictofproduct_sorted = sorted(list(dictofproduct.items()), key=operator.itemgetter(0))
    list_of_product_sorted = [prs[1] for prs in dictofproduct_sorted]
    return list_of_product_sorted
def uncompress_dbl_product(dbl_filename):
    no_ext = os.path.splitext(dbl_filename)[0]
    dbl_dir = os.path.realpath(no_ext + ".DBL.DIR")
    dbl_bin = os.path.realpath(no_ext + ".DBL.BIN")
    if os.path.isdir(dbl_dir):
        LOGGER.debug("The DBL product <" + dbl_dir +
                     "> is already uncompressed (.DBL.DIR directory already exists).")
    elif os.path.isfile(dbl_bin):
        LOGGER.debug("The DBL product <" + dbl_bin +
                     "> is already uncompressed (.DBL.BIN file already exists).")
    else:
        if not os.path.exists(dbl_filename):
            raise MajaDataException(dbl_filename + " doesn't exist!")
        uncompress_file(dbl_filename, os.path.dirname(dbl_filename))
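# Hedged usage sketch (illustrative path): given a hypothetical EarthExplorer archive such as
# "/path/to/XX_TEST_XXX_LUTANX_L2VALD_SITE_20130402_LTC.DBL", uncompress_dbl_product() extracts it
# next to the .DBL file, unless a matching .DBL.DIR directory or .DBL.BIN file already exists, in
# which case it only logs that the product is already uncompressed.
#
#   uncompress_dbl_product("/path/to/XX_TEST_XXX_LUTANX_L2VALD_SITE_20130402_LTC.DBL")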
def get_list_of_input_l2_image_product_sorted(dir, app_handler, enable_public_data):
    list_of_product = L2ImageReaderProvider.get_list_of_l2_products(dir)
    LOGGER.info("Nb L2 products found: " + str(len(list_of_product)))
    if len(list_of_product) == 0:
        raise MajaDataException("No L2 product found")
    dictofproduct = {}
    for pr in list_of_product:
        l2product = L2ImageReaderProvider.create(pr, app_handler, enable_public_data)
        l2jdate = get_julianday_as_double(l2product.Date)
        dictofproduct[l2jdate] = l2product
    # Sort the products by ascending julian date
    dictofproduct_sorted = sorted(list(dictofproduct.items()), key=operator.itemgetter(0))
    list_of_product_sorted = [prs[1] for prs in dictofproduct_sorted]
    return list_of_product_sorted
def generate_toa(self, listOfTOAImageFileNames, reflectanceMultiplicationValues, working):
    """
    :param listOfTOAImageFileNames: ListOfStrings
    :param reflectanceMultiplicationValues: ListOfDoubles
    :return:
    :rtype: string
    """
    if len(reflectanceMultiplicationValues) != len(listOfTOAImageFileNames):
        raise MajaDataException("Not the same number of band images and coefficients")
    # =======> GENERATE TOA CACHING
    l_ProjectionRef = self.generate_toa_caching(listOfTOAImageFileNames,
                                                reflectanceMultiplicationValues, working)
    LOGGER.debug("Caching TOA images done ...")
    return l_ProjectionRef
def get_integer_jday_from_filename(filename):
    # Find the date automatically in the filename: drop the extension, split on "_" and keep only
    # the digit-only tokens, then take the last one.
    # Used to detect the date for:
    #   - L2 EarthExplorer product: .../S4_TEST_S4C_L2VALD_AFAngola00D00B_20130223.HDR -> detects 20130223
    #   - L2 private EarthExplorer LTC mha file:
    #     ...LTC.DBL.DIR/S4_TEST_S4C_LUTANX_L2VALD_CVERSAIL00D00B_20130402_LTC.DBL.mha -> detects 20130402
    filenamekey = split_string(os.path.splitext(os.path.basename(filename))[0], "_", True)
    number_list = [f for f in filenamekey if f.isdigit()]
    if len(number_list) == 0:
        raise MajaDataException("Filename " + filename + " does not contain a date")
    indate = date_utils.get_datetime_from_yyyymmdd(number_list[-1])
    return date_utils.get_julianday_as_int(indate)
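# Hedged usage sketch, reusing the example filename quoted in the comments above:
#
#   jday = get_integer_jday_from_filename("S4_TEST_S4C_L2VALD_AFAngola00D00B_20130223.HDR")
#   # the last all-digit token, "20130223", is parsed as YYYYMMDD and converted to an
#   # integer Julian day via date_utils.get_julianday_as_int()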
def write_private_ltc_header_list_of_dbl(self, list_of_dbl):
    """
    <DBL_Organization>
        <List_of_Packaged_DBL_Files count="2">
            <Packaged_DBL_File sn="1">
                <Relative_File_Path>VE_TEST_VSC_LUTANX_L2VALD_CAMARGUE_20120103_LTC.DBL.DIR/VE_TEST_VSC_LUTANX_L2VALD_CAMARGUE_20120101_LTC.DBL.mha</Relative_File_Path>
                <File_Definition>VSC_LUTANX</File_Definition>
            </Packaged_DBL_File>
            <Packaged_DBL_File sn="2">
                <Relative_File_Path>VE_TEST_VSC_LUTANX_L2VALD_CAMARGUE_20120103_LTC.DBL.DIR/VE_TEST_VSC_LUTANX_L2VALD_CAMARGUE_20120103_LTC.DBL.mha</Relative_File_Path>
                <File_Definition>VSC_LUTANX</File_Definition>
            </Packaged_DBL_File>
        </List_of_Packaged_DBL_Files>
    </DBL_Organization>
    """
    LOGGER.debug("Start writing the list of LTC DBL nodes ...")
    try:
        xnode = xml_tools.get_only_value(self.root, "//DBL_Organization/List_of_Packaged_DBL_Files")
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" +
                                "//DBL_Organization/List_of_Packaged_DBL_Files" + "' in the xml file!")
    l_count = len(list_of_dbl)
    xnode.set("count", str(l_count))
    for i in range(l_count):
        f = list_of_dbl[i]
        node = et.Element("Packaged_DBL_File")
        node.set("sn", str(i + 1))
        # Relative file path
        node2 = et.Element("Relative_File_Path")
        node2.text = f.relativefilepath
        node.append(node2)
        # File definition
        node3 = et.Element("File_Definition")
        node3.text = f.filedefinition
        node.append(node3)
        # Insert into the document
        xnode.append(node)
    LOGGER.debug("Writing the list of LTC DBL nodes DONE")
def extract_cams_info(p_indir, p_sat, p_model):
    # *************************************************************************************************************
    # Read the GIPP file "GIP_L2TOCR"
    # *************************************************************************************************************
    LOGGER.info("Starting GIP_L2TOCR reading ...")
    l_gippfilenames = gipp_utils.get_list_of_gipp_filenames_with_mission(p_indir, "GIP_L2TOCR", p_sat)
    l_gippfilename = EarthExplorerUtilities.find_gipp_with_sub_set_of_value(l_gippfilenames, p_model)
    if l_gippfilename is None:
        raise MajaDataException("Missing input L2TOCR gipp for model " + p_model)
    # Read the CAMS information from the LUT GIPP, if available
    l_lut_xml_handler = GippLUTEarthExplorerXMLFileHandler(l_gippfilename)
    if l_lut_xml_handler.has_cams_info():
        return l_lut_xml_handler.get_cams_info()
    else:
        LOGGER.debug("GIPP doesn't have CAMS info available")
        return None
def write_list_of_gipp_files(self, listofgipps, write_nature_node=True,
                             p_xpath="//List_of_GIPP_Files", p_subxpath="GIPP_File"):
    """
    <List_of_GIPP_Files count="11">
        <GIPP_File sn="1">
            <Nature>GIP_CKEXTL</Nature>
            <File_Location>NIL=OTHER:EXTERNAL</File_Location>
            <Logical_Name>VE_TEST_GIP_CKEXTL_S_CAMARGUE_00001_00000000_99999999</Logical_Name>
        </GIPP_File>
    """
    try:
        xnode = xml_tools.get_only_value(self.root, p_xpath)
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" + p_xpath + "' in the xml file!")
    l_count = len(listofgipps)
    xnode.set("count", str(l_count))
    for i in range(l_count):
        f = listofgipps[i]
        node = et.Element(p_subxpath)
        node.set("sn", str(i + 1))
        if write_nature_node:
            # Nature
            node2 = et.Element("Nature")
            node2.text = f.Nature
            node.append(node2)
        # File_Location
        node3 = et.Element("File_Location")
        node3.text = f.FileLocation
        node.append(node3)
        # Logical_Name
        node4 = et.Element("Logical_Name")
        node4.text = f.LogicalName
        node.append(node4)
        xnode.append(node)
def write_list_of_annex_files(self, listofannex):
    """
    <List_of_Annex_Files count="6">
        <Annex_File sk="CLD">
            <Nature>VSC_PDTANX</Nature>
            <File_Location>./VE_TEST_VSC_L2VALD_CAMARGUE_20120101.DBL.DIR/VE_TEST_VSC_PDTANX_L2VALD_CAMARGUE_20120101_CLD.HDR</File_Location>
            <Logical_Name>VE_TEST_VSC_PDTANX_L2VALD_CAMARGUE_20120101_CLD</Logical_Name>
        </Annex_File>
    """
    try:
        xnode = xml_tools.get_only_value(self.root, "//Product_Organization/List_of_Annex_Files")
    except BaseException:
        raise MajaDataException("Error while reading the xml node '" +
                                "//Product_Organization/List_of_Annex_Files" + "' in the xml file!")
    l_count = len(listofannex)
    xnode.set("count", str(l_count))
    for i in range(l_count):
        f = listofannex[i]
        node = et.Element("Annex_File")
        node.set("sk", f.sk)
        # Nature
        node2 = et.Element("Nature")
        node2.text = f.Nature
        node.append(node2)
        # File_Location
        node3 = et.Element("File_Location")
        node3.text = f.FileLocation
        node.append(node3)
        # Logical_Name
        node4 = et.Element("Logical_Name")
        node4.text = f.LogicalName
        node.append(node4)
        xnode.append(node)