Example 1
def submit_data_unit(self, data_unit_description):
    """ Create a DataUnit object and bind it to a physical resource (a Pilot-Data). """
    du = DataUnit(pilot_data=None,
                  data_unit_description=data_unit_description)
    self.data_units[du.id] = du
    self.du_queue.put(du)
    # queue currently not persisted
    CoordinationAdaptor.update_cds(self.url, self)
    return du
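A caller typically reaches submit_data_unit through the Pilot-API's ComputeDataService. The following is a hedged usage sketch, not code from the example above: the coordination URL, store location, and file names are placeholders, and it assumes the usual BigJob setup of a PilotDataService attached to a ComputeDataService.

# Usage sketch (assumed setup; URLs, sizes, and file names are placeholders)
from pilot import PilotDataService, ComputeDataService

pilot_data_service = PilotDataService(coordination_url="redis://localhost:6379")
pilot_data_service.create_pilot({
    "service_url": "ssh://localhost/tmp/pilot-store",  # physical file store
    "size": 100,                                       # capacity in MB
})

compute_data_service = ComputeDataService()
compute_data_service.add_pilot_data_service(pilot_data_service)

# submit_data_unit creates the DataUnit, registers it under its id,
# queues it for binding to a Pilot-Data, and persists the CDS state.
du = compute_data_service.submit_data_unit({
    "file_urls": ["file://localhost/tmp/input.txt"],
})
du.wait()  # block until the DU is bound and its files are replicated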
Example 2
    def __stage_out_data_units(self, output_data=[], workingdirectory=None):
        """ stage out data to a specified data unit pilot data """
        logger.debug("Stage out output files")
        """ Parsing output data field of job description:
            {
            ...
             "output_data": [
                            {
                             output_data_unit.get_url(): 
                             ["stdout.txt", "stderr.txt"]
                            }
                            ]
            }    
        """
        try:
            for data_unit_dict in output_data:
                logger.debug("Process: " + str(data_unit_dict))
                for du_url in data_unit_dict.keys():  # each dict maps one DU URL to its file list
                    #pd_url = self.__get_pd_url(du_url)
                    #pilot_data = PilotData(pd_url=pd_url)
                    #du = pilot_data.get_du(du_url)
                    du = DataUnit(du_url=du_url)
                    file_list = data_unit_dict[du_url]
                    logger.debug("Add files: " + str(file_list))
                    all_files = []
                    for output_file in file_list:
                        expanded_files = [output_file]
                        if "*" in output_file or "?" in output_file:
                            expanded_files = self.__expand_file_pattern(
                                output_file, workingdirectory)
                            logger.debug("Expanded files: " +
                                         str(expanded_files))

                        for f in expanded_files:
                            all_files.append(os.path.join(workingdirectory, f))

                    du.add_files(all_files)
                    for f in all_files:
                        os.remove(f)
        except Exception:
            logger.error("Stage out of files failed.")
            self.__print_traceback()
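The output_data field that this method parses belongs to the compute unit description. Below is a hedged sketch of a matching description (illustrative only; output_du stands for a DataUnit obtained earlier, e.g. from submit_data_unit, mirroring the output_data_unit.get_url() key shown in the docstring above).

# Sketch of a compute unit description with an output_data field.
# output_du is assumed to be an existing DataUnit; file names are examples.
compute_unit_description = {
    "executable": "/bin/bash",
    "arguments": ["-c", "echo hello > stdout.txt"],
    "output_data": [
        {
            # key: DU URL, value: file list ("*" and "?" globs are expanded)
            output_du.get_url(): ["stdout.txt", "*.log"],
        }
    ],
}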
Example 3
def __stage_in_data_units(self, input_data=[], target_directory="."):
    """ Stage in the DataUnits specified in the input_data field. """
    try:
        logger.debug("Stage in input files to: %s" % target_directory)
        for i in input_data:
            du = DataUnit(du_url=i)
            logger.debug("Restored DU... calling get_state()")
            logger.debug("DU State: " + du.get_state())
            du.wait()
            logger.debug("Reconnected to DU. Exporting it now...")
            du.export(target_directory)
    except Exception:
        logger.error("Stage-in of files failed.")
        self.__print_traceback()
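The input_data list consumed here is flat: each entry is just a DU URL to reconnect to and export into the target directory. A sketch of a compute unit description that would drive this path (illustrative only; input_du stands for an existing DataUnit):

# Sketch of a compute unit description with an input_data field.
compute_unit_description = {
    "executable": "/bin/cat",
    "arguments": ["input.txt"],
    "input_data": [input_du.get_url()],  # one DU URL per DataUnit to stage in
}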
Example 4
def __stage_in_data_units(self, input_data=[], target_directory="."):
    """ Stage in the DataUnits specified in the input_data field. """
    logger.debug("Stage in input files to: %s" % target_directory)
    for i in input_data:
        #pd_url = self.__get_pd_url(i)
        #du_id = self.__get_du_id(i)
        #pd = PilotData(pd_url=pd_url)
        #du = pd.get_du(du_id)
        #du.export(target_directory)
        du = DataUnit(du_url=i)
        du.wait()
        du.export(target_directory)
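This variant performs the same reconnect-and-export cycle as Example 3, but without the try/except guard or the state logging, so any failure in du.wait() or du.export() propagates to the caller instead of being logged and swallowed.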