Exemplo n.º 1
0
  def _setUpGenieParticulate(self):
      """Open the particulate main and archive DB connections.

      Reads the connection settings from the test configuration, connects
      to both databases and compiles the XPath expressions used to check
      the generated SAMPML product.
      """
      # main (production) particulate database
      section = "ParticulateDatabaseAccess"
      self.mainDatabase      = self.conf.get(section, "hostname")
      self.mainUser          = self.conf.get(section, "user")
      self.mainPassword      = self.conf.get(section, "password")
      self.mainActivateTimer = self.conf.getboolean(section, "activateTimer", True)

      TestSAMPMLCreator.c_log.info("")
      TestSAMPMLCreator.c_log.info("Particulate Database=%s" % (self.mainDatabase))

      # connect to the production database
      self.mainConn = DatabaseConnector(self.mainDatabase, self.mainUser,
                                        self.mainPassword, self.mainActivateTimer)
      self.mainConn.connect()

      # archive particulate database
      section = "ParticulateArchiveDatabaseAccess"
      self.ParticulateArchiveDatabaseAccess = self.conf.get(section, "hostname")
      self.archiveUser          = self.conf.get(section, "user")
      self.archivePassword      = self.conf.get(section, "password")
      self.archiveActivateTimer = self.conf.getboolean(section, "activateTimer", True)

      TestSAMPMLCreator.c_log.info("Archive Database=%s" % (self.ParticulateArchiveDatabaseAccess))

      # connect to the archive database
      self.archConn = DatabaseConnector(self.ParticulateArchiveDatabaseAccess, self.archiveUser,
                                        self.archivePassword, self.archiveActivateTimer)
      self.archConn.connect()

      # compile xpath expressions used to check the final product
      self.xpath_calIDs = etree.XPath("//*[local-name(.)='CalibrationInformation']/*[local-name(.)='Calibration']/@ID")
      self.xpath_specalIDs = etree.XPath("//*[local-name(.)='MeasuredInformation']/*[local-name(.)='Spectrum']/@calibrationIDs")
Exemplo n.º 2
0
    def __init__(self, a_args):
        """Load the configuration, set up logging and open the production
        and archive database connections.

        Args:
           a_args: command-line arguments dict forwarded to _load_configuration
        """
        super(Runner, self).__init__()

        # load the configuration (from --conf_dir or RNPICKER_CONF_DIR)
        self._conf = self._load_configuration(a_args)

        self._log = LoggerFactory.get_logger("Runner")

        # production (noble gaz) database settings
        self._ngDatabase      = self._conf.get("NobleGazDatabaseAccess", "hostname")
        self._ngUser          = self._conf.get("NobleGazDatabaseAccess", "user")
        self._ngPassword      = self._conf.get("NobleGazDatabaseAccess", "password")
        self._ngActivateTimer = self._conf.getboolean("NobleGazDatabaseAccess", "activateTimer", True)

        # connector for the production database
        self._ngMainConn = DatabaseConnector(self._ngDatabase, self._ngUser,
                                             self._ngPassword, self._ngActivateTimer)

        # archive database settings
        self._ArchiveDatabaseAccess = self._conf.get("ArchiveDatabaseAccess", "hostname")
        self._archiveUser           = self._conf.get("ArchiveDatabaseAccess", "user")
        self._archivePassword       = self._conf.get("ArchiveDatabaseAccess", "password")
        self._archiveActivateTimer  = self._conf.getboolean("ArchiveDatabaseAccess", "activateTimer", True)

        # connector for the archive database
        self._ngArchConn = DatabaseConnector(self._ArchiveDatabaseAccess, self._archiveUser,
                                             self._archivePassword, self._archiveActivateTimer)

        # open both connections
        self._ngMainConn.connect()
        self._ngArchConn.connect()
Exemplo n.º 3
0
    def __init__(self, a_args):
        """Initialise configuration, logging and the two DB connectors.

        Args:
           a_args: command-line arguments (configuration itself comes from
                   the Conf singleton)
        """
        super(Runner, self).__init__()

        # configuration singleton
        self._conf = Conf.get_instance()

        self._log = LoggerFactory.get_logger("Runner")

        # --- production (noble gaz) database -------------------------
        section = "NobleGazDatabaseAccess"
        self._ngDatabase = self._conf.get(section, "hostname")
        self._ngUser = self._conf.get(section, "user")
        self._ngPassword = self._conf.get(section, "password")
        self._ngActivateTimer = self._conf.getboolean(section, "activateTimer", True)

        # the password may be stored obfuscated in the config file
        if self._conf.getboolean(section, "obfuscatePassword", False):
            self._ngPassword = ctbto.common.utils.deobfuscate_string(self._ngPassword)

        self._ngMainConn = DatabaseConnector(self._ngDatabase, self._ngUser,
                                             self._ngPassword, self._ngActivateTimer)

        # --- archive database ----------------------------------------
        section = "ArchiveDatabaseAccess"
        self._ArchiveDatabaseAccess = self._conf.get(section, "hostname")
        self._archiveUser = self._conf.get(section, "user")
        self._archivePassword = self._conf.get(section, "password")

        if self._conf.getboolean(section, "obfuscatePassword", False):
            self._archivePassword = ctbto.common.utils.deobfuscate_string(self._archivePassword)

        self._archiveActivateTimer = self._conf.getboolean(section, "activateTimer", True)

        self._ngArchConn = DatabaseConnector(self._ArchiveDatabaseAccess, self._archiveUser,
                                             self._archivePassword, self._archiveActivateTimer)

        # open both connections
        self._ngMainConn.connect()
        self._ngArchConn.connect()
Exemplo n.º 4
0
  def _setUpNobleGaz(self):
      """Read the noble gaz DB settings from the test configuration and
      open the connection."""
      section = "NobleGazDatabaseAccess"
      self.nbDatabase      = self.conf.get(section, "hostname")
      self.nbUser          = self.conf.get(section, "user")
      self.nbPassword      = self.conf.get(section, "password")
      self.nbActivateTimer = self.conf.getboolean(section, "activateTimer", True)

      TestSAMPMLCreator.c_log.info("Noble Gaz Database=%s" % (self.nbDatabase))

      # create DB connector and connect
      self.nbConn = DatabaseConnector(self.nbDatabase, self.nbUser,
                                      self.nbPassword, self.nbActivateTimer)
      self.nbConn.connect()
Exemplo n.º 5
0
class Runner(object):
    """ Class for fetching and producing the ARR """
    
    def __init__(self, a_args):
        """Load the configuration, set up logging and open the production
        and archive database connections.

        Args:
           a_args: command-line arguments dict forwarded to _load_configuration
        """
        super(Runner, self).__init__()

        # load the configuration (from --conf_dir or RNPICKER_CONF_DIR)
        self._conf = self._load_configuration(a_args)

        self._log = LoggerFactory.get_logger("Runner")

        # --- production (noble gaz) database -------------------------
        section = "NobleGazDatabaseAccess"
        self._ngDatabase = self._conf.get(section, "hostname")
        self._ngUser = self._conf.get(section, "user")
        self._ngPassword = self._conf.get(section, "password")
        self._ngActivateTimer = self._conf.getboolean(section, "activateTimer", True)

        # the password may be stored obfuscated in the config file
        if self._conf.getboolean(section, "obfuscatePassword", False):
            self._ngPassword = ctbto.common.utils.deobfuscate_string(self._ngPassword)

        self._ngMainConn = DatabaseConnector(self._ngDatabase, self._ngUser,
                                             self._ngPassword, self._ngActivateTimer)

        # --- archive database ----------------------------------------
        section = "ArchiveDatabaseAccess"
        self._archiveDatabaseAccess = self._conf.get(section, "hostname")
        self._archiveUser = self._conf.get(section, "user")
        self._archivePassword = self._conf.get(section, "password")
        self._archiveActivateTimer = self._conf.getboolean(section, "activateTimer", True)

        if self._conf.getboolean(section, "obfuscatePassword", False):
            self._archivePassword = ctbto.common.utils.deobfuscate_string(self._archivePassword)

        self._ngArchConn = DatabaseConnector(self._archiveDatabaseAccess, self._archiveUser,
                                             self._archivePassword, self._archiveActivateTimer)

        # open both connections
        self._ngMainConn.connect()
        self._ngArchConn.connect()
    
    @classmethod
    def log_in_file(cls, aMessage):
        """ to log in the file as the ROOT logger

            Args:
               aMessage: message to log at INFO level

            Returns:
               None
        """
        # first parameter renamed from ``self`` to ``cls`` to follow the
        # classmethod convention; callers (Runner.log_in_file(msg)) are
        # unaffected
        log = LoggerFactory.get_logger("ROOT")
        log.info(aMessage)
        
    def _load_configuration(self, a_args):
        """
            try to load the configuration from the config file.
            priority rules: if --conf_dir is set, try to read a dir/rnpicker.config. Otherwise look for RNPICKER_CONF_DIR env var
        
            Args:
               a_args: command-line arguments dict (may contain 'conf_dir')
                
            Returns:
               return a conf object
        
            Raises:
               ConfAccessError when no conf dir can be found or it is not a directory
        """
        # read from command_line first; local renamed from ``dir`` so it no
        # longer shadows the builtin
        conf_dir = a_args.get('conf_dir', None)

        # fall back on the environment
        if conf_dir is None:
            conf_dir = os.environ.get('RNPICKER_CONF_DIR', None)
        else:
            # always force the ENV Variable
            os.environ['RNPICKER_CONF_DIR'] = conf_dir

        if conf_dir is None:
            raise ConfAccessError('The conf dir needs to be set from the command line or using the env variable RNPICKER_CONF_DIR')

        # guard clause: fail fast when the path is not a directory
        if not os.path.isdir(conf_dir):
            raise ConfAccessError('The conf dir %s set with the env variable RNPICKER_CONF_DIR is not a dir'%(conf_dir))

        os.environ[Conf.ENVNAME] = '%s/%s'%(conf_dir,'rnpicker.config')

        return Conf.get_instance()
    
    def _create_temp_directories(self, dir):
        """Create the temporary data directory under *dir* if necessary."""
        data_dir = '%s/data'%(dir)
        ctbto.common.utils.makedirs(data_dir)
        
    def _create_db_directories(self, dir):
        """Create the db directory *dir* if necessary."""
        db_dir = '%s'%(dir)
        ctbto.common.utils.makedirs(db_dir)
    
    def _get_list_of_sampleIDs(self, last_time_sent, stations='',beginDate='2008-07-01',endDate='2008-07-31',spectralQualif='FULL',nbOfElem='10000000'):
        """Return the sorted list of sample ids matching the given stations,
        date range, spectral qualifier and element limit."""
        station_list = ','.join(map(str,stations)) #IGNORE:W0141

        result = self._ngMainConn.execute(SQL_GETSAMPLEIDS%(station_list,beginDate,endDate,spectralQualif,nbOfElem))

        # the sample id is the first column of each returned row
        sampleIDs = [row[0] for row in result.fetchall()]

        self._log.info("There are %d products (samples) for %s. Last time products for this day were sent : %s." % (len(sampleIDs), beginDate.split(' ')[0], last_time_sent))
        self.log_in_file("List of sampleIDs to fetch: %s"%(sampleIDs))

        sampleIDs.sort()

        return sampleIDs

    def _get_all_stations(self,a_stations_types):
        """Return the ids of all stations of the requested types.

            Args:
               a_stations_types: iterable of station type names ('SAUNA' or 'SPALAX')

            Returns:
               list of station ids (second column of each returned row)

            Raises:
               ValueError for an unsupported station type. The original code
               would either fail with a NameError (first iteration) or
               silently reuse the previous iteration's query result.
        """
        sta_ids    = []

        # local renamed from ``type`` so it no longer shadows the builtin
        for station_type in a_stations_types:

            if   station_type == 'SAUNA':
                result = self._ngMainConn.execute(self._conf.get("Products","Sauna_Stations_SQL",SQL_GETALLSAUNASTATIONCODES))
            elif station_type == 'SPALAX':
                result = self._ngMainConn.execute(self._conf.get("Products","Spalax_Stations_SQL",SQL_GETALLSPALAXSTATIONCODES))
            else:
                raise ValueError("Unsupported station type %s" % (station_type))

            sta_codes  = []

            rows   = result.fetchall()

            # each row is (station code, station id)
            for row in rows:
                sta_codes.append(row[0])
                sta_ids.append(row[1])

            self._log.info("Found %d %s stations."%(len(sta_codes),station_type))
            self.log_in_file("Found the following %s stations: %s."%(station_type,sta_codes))

        return sta_ids
    
    def _get_list_of_days_to_search(self, a_db_dict, a_from):
        """ 
           return list of days to search using the db_dict content.
           If a_from is passed and valid use as the from_date.
           If there are some days in db_dict set the from_date from this point.
           If not use today.
        
           Args:
               a_db_dict       : Persistent dictionary containing what has already been sent
               a_from          : optional ISO8601 starting day
                
            Returns:
               list of ISO8601 days from the starting day up to today (inclusive)
        
            Raises:
               exception
        """
        # determine the starting day: explicit a_from wins, then the oldest
        # recorded day, then today
        if a_from is not None:
            iso_from_day = a_from
        else:
            l_keys = a_db_dict.keys()
            if len(l_keys) > 0:
                iso_from_day = min(l_keys)
            else:
                iso_from_day = ctbto.common.time_utils.getToday()

        from_datetime = ctbto.common.time_utils.getDateTimeFromISO8601(iso_from_day)

        now_datetime = datetime.datetime.today()

        nb_days = (now_datetime - from_datetime).days

        list_of_days = []

        # normal case nb_days is >= 0
        if nb_days >= 0:
            # one entry per day from from_datetime up to today (inclusive)
            for offset in range(nb_days + 1):
                day = from_datetime + datetime.timedelta(days=offset)
                list_of_days.append(ctbto.common.time_utils.getISO8601fromDateTime(day))
        else:
            self._log.info("Error nb_days should >=0")

        return list_of_days
    
    def _get_list_of_new_samples_to_email(self, a_db_dict, a_list_of_days, a_station_types,force_resend=False):
        """
            Method returning what samples need to be fetched and emailed for each day.
        
            Args:
               a_db_dict       : Persistent dictionary containing what has already been sent
               a_list_of_days  : list of ISO8601 days to consider
               a_station_types : station types used to select the stations
               force_resend    : when True, resend every sample found even if already sent
                
            Returns:
               dict mapping each day to the sorted list of sample ids to fetch
        
            Raises:
               exception
        """
        
        # get all related stations
        stations      = self._get_all_stations(a_station_types)
        
        result = {}
        
        # for each day check if there is more samples to retrieve that what is in the db
        # if yes add the new samples to the list of samples to fetch
        for day in a_list_of_days:
            
            if day in a_db_dict:
                last_time_sent = a_db_dict[day][LAST_TIME_SENT]
            else:
                last_time_sent = "never"
            
            begin_date = ctbto.common.time_utils.getDateTimeFromISO8601(day)
            end_date   = begin_date + datetime.timedelta(days=1)
            
            # get them in Oracle format
            d1 = ctbto.common.time_utils.getOracleDateFromDateTime(begin_date)
            d2 = ctbto.common.time_utils.getOracleDateFromDateTime(end_date)
            
            # get list of samples for the chosen day
            l        = self._get_list_of_sampleIDs(last_time_sent, stations, d1, d2)
            
            if not force_resend:
                l_set    = set(l)
            
                # keep only the samples that have not been sent for that day yet
                if day in a_db_dict:
                    l_prev_set = set(a_db_dict[day][SAMPLES_KEY])
                    new_samples_set = l_set.difference(l_prev_set)
                else:
                    new_samples_set = l_set
            
                if len(new_samples_set) > 0:
                    l = list(new_samples_set)
                    l.sort()   
                    self._log.info("%d new products to be retrieved for %s."%(len(l),day))
                    result[day] = l
            else:
                l.sort()
                if len(l) > 0:
                    self._log.info("Will fetch the %d new products for %s."%(len(l),day))
                result[day] = l
                
        # log the complete list of samples that will be fetched
        full_list = []
        for value in result.itervalues():
            full_list.extend(value)
        
        if len(full_list) > 0:
            full_list.sort()
            # BUG FIX: the format string was missing its %s placeholder,
            # which made this line raise a TypeError at runtime
            Runner.log_in_file("Will fetch the following sampleIDs %s"%(full_list))
        
        return result
               
    def _save_in_id_database(self,a_id,a_dir,a_db_dict,a_emailed_list,a_searched_day,a_sending_time_stamp):
        """
            Save the information related to the current batch in the db_dict.
            
            Args:
               a_id                 : email group name,
               a_dir                : directory where the db_dict is going to be stored,
               a_db_dict            : Persistent dictionary containing what has already been sent,
               a_emailed_list       : list of sample ids emailed in this batch,
               a_searched_day       : the day for which the samples are retrieved (key in db_dict)
               a_sending_time_stamp : timestamp when the data was sent (key for the history) 
               
            Returns:
               None
        
            Raises:
               exception
        """
        key = a_searched_day
        
        # if it doesn't exist, initialize the structure
        if key not in a_db_dict:
            a_db_dict[key] = {}
            a_db_dict[key][SAMPLES_KEY]    = []
            a_db_dict[key][HISTORY_KEY]    = {}
            a_db_dict[key][LAST_TIME_SENT] = None
            
        info_dict = a_db_dict.get(key,{})
        
        # append the newly emailed samples to the day's cumulative list
        l = info_dict.get(SAMPLES_KEY,[])
        l.extend(a_emailed_list) 
        a_db_dict[key][SAMPLES_KEY] = l
        
        # Add history info
        hist_d = info_dict[HISTORY_KEY]
        hist_d[a_sending_time_stamp] = a_emailed_list
        
        a_db_dict[key][LAST_TIME_SENT] = a_sending_time_stamp
        
        filename = "%s/%s.emaildb"%(a_dir,a_id)
        
        # BUG FIX: open the pickle file in binary mode ('wb' instead of 'w')
        # and use a context manager so the file handle is closed even if
        # pickle.dump raises
        with open(filename,'wb') as f:
            pickle.dump(a_db_dict,f)
        
    def _remove_expired_days_from_db_dict(self, a_db_dict, a_dir, a_id):
        """ 
            remove all samples that are older than the limit given in the config file 
        """
        self._log.info("Remove information from the group database %s/%s.emaildb if expired"%(a_dir,a_id))

        # number of days after which a day's entry expires
        limit = self._conf.getint('AutomaticEmailingInformation','expirationDate',20)

        keys = a_db_dict.keys()
        keys.sort()

        now_datetime = datetime.datetime.today()

        for key in keys:
            timestamp = a_db_dict[key].get(LAST_TIME_SENT, None)

            # compatibility with the previous version: entries without a
            # LAST_TIME_SENT are stamped with the current time
            if timestamp is None:
                timestamp = self._get_now_timestamp()
                a_db_dict[key][LAST_TIME_SENT] = timestamp

            sent_datetime = datetime.datetime.strptime(timestamp,'%Y-%m-%dT%H%M%S')

            # drop the day once nothing has been sent for `limit` days
            if (now_datetime - sent_datetime).days >= limit:
                self._log.info("Remove %s day information from the group database as no sending have been done for the last %s days."%(key,limit))
                del a_db_dict[key]
            
    def _get_id_database(self,a_dir,a_id):
        """
            return the persistent dict previously stored in the db dir.
            This file should contain a dict of the last email shots.
        
            Args:
               a_dir : directory holding the .emaildb files
               a_id  : email group name
                
            Returns:
               the stored dict, or an empty dict when no file exists
        
            Raises:
               exception
        """
        filename = "%s/%s.emaildb"%(a_dir,a_id)
        
        data = {}
        
        if os.path.exists(filename):
            # BUG FIX: open the pickle file in binary mode ('rb') and use a
            # context manager so the handle is closed even if pickle.load raises
            with open(filename, 'rb') as f:
                data = pickle.load(f)
            
        return data
    
    def _move_sent_files_to_files_db(self,a_origin_dir,a_destination_dir):
        """
           move all files from a_origin_dir in a_destination_dir and delete the a_origin_dir
        """
        try:
            for root,dirs,_ in os.walk(a_origin_dir):
                for dir in dirs:
                    if dir in ['ARR','samples']:
                        dest = '%s/%s'%(a_destination_dir,dir)
                        for i_r,_,i_files in os.walk('%s/%s'%(root,dir)):
                            # only consider files in ARR and in samples
                            for filename in i_files:
                                shutil.move('%s/%s'%(i_r,filename),dest)
             
        except Exception, e: #IGNORE:W0703,W0702
            # hide error in logs because it is a minor error
            self._log.debug("Error when trying to move retrieved files from %s into %s.Raised Exception %s"%(a_origin_dir,e))
        finally:
Exemplo n.º 6
0
class Runner(object):
    """ Class for fetching and producing the ARR """

    def __init__(self, a_args):
        """Read the DB settings from the Conf singleton and open the
        production (noble gaz) and archive database connections.

        Args:
           a_args: command-line arguments (NOTE(review): accepted but never
                   used in this body; the configuration comes from
                   Conf.get_instance())
        """

        super(Runner, self).__init__()

        # create an empty shell Conf object
        self._conf = Conf.get_instance()

        self._log = LoggerFactory.get_logger("Runner")

        # setup the prod database and connect to it
        self._ngDatabase = self._conf.get("NobleGazDatabaseAccess", "hostname")
        self._ngUser = self._conf.get("NobleGazDatabaseAccess", "user")
        self._ngPassword = self._conf.get("NobleGazDatabaseAccess", "password")
        self._ngActivateTimer = self._conf.getboolean("NobleGazDatabaseAccess", "activateTimer", True)

        # the password may be stored obfuscated in the config file
        if self._conf.getboolean("NobleGazDatabaseAccess", "obfuscatePassword", False):
            self._ngPassword = ctbto.common.utils.deobfuscate_string(self._ngPassword)

        # create DB connector
        self._ngMainConn = DatabaseConnector(self._ngDatabase, self._ngUser, self._ngPassword, self._ngActivateTimer)

        # setup the archive database and connect to it
        self._ArchiveDatabaseAccess = self._conf.get("ArchiveDatabaseAccess", "hostname")
        self._archiveUser = self._conf.get("ArchiveDatabaseAccess", "user")
        self._archivePassword = self._conf.get("ArchiveDatabaseAccess", "password")

        if self._conf.getboolean("ArchiveDatabaseAccess", "obfuscatePassword", False):
            self._archivePassword = ctbto.common.utils.deobfuscate_string(self._archivePassword)

        self._archiveActivateTimer = self._conf.getboolean("ArchiveDatabaseAccess", "activateTimer", True)

        # create DB connector
        self._ngArchConn = DatabaseConnector(
            self._ArchiveDatabaseAccess, self._archiveUser, self._archivePassword, self._archiveActivateTimer
        )
        # connect to the DBs
        self._ngMainConn.connect()

        self._ngArchConn.connect()

    @classmethod
    def load_configuration(cls, a_args):
        """
            try to load the configuration from the config file.
            priority rules: if --conf_dir is set, try to read a dir/rnpicker.config. Otherwise look for RNPICKER_CONF_DIR env var
        
            Args:
               a_args: command-line arguments dict (may contain 'conf_dir')
                
            Returns:
               return a conf object
        
            Raises:
               ConfAccessError when no conf dir can be found or it is not a directory
        """
        # read from command_line
        dir = a_args.get("conf_dir", None)

        # try to read from env
        if dir == None:
            dir = os.environ.get("RNPICKER_CONF_DIR", None)
        else:
            # always force the ENV Variable
            os.environ["RNPICKER_CONF_DIR"] = dir

        if dir is None:
            raise ConfAccessError(
                "The conf dir needs to be set from the command line or using the env variable RNPICKER_CONF_DIR"
            )

        if os.path.isdir(dir):
            os.environ[Conf.ENVNAME] = "%s/%s" % (dir, "rnpicker.config")

            return Conf.get_instance()
        else:
            raise ConfAccessError("The conf dir %s set with the env variable RNPICKER_CONF_DIR is not a dir" % (dir))

    @classmethod
    def log_in_file(self, aMessage):
        """ to log in the file as the ROOT logger """
        # NOTE(review): classmethod whose first parameter is named ``self``
        # instead of the conventional ``cls``

        log = LoggerFactory.get_logger("ROOT")
        log.info(aMessage)

    def _get_list_of_sampleIDs(
        self, stations="", beginDate="2008-07-01", endDate="2008-07-31", spectralQualif="FULL", nbOfElem="10000000"
    ):
        """Return the list of sample ids matching the given stations, date
        range, spectral qualifier and element limit (first column of the
        SQL_GETSAMPLEIDS query)."""

        l = ",".join(map(str, stations))  # IGNORE:W0141

        result = self._ngMainConn.execute(SQL_GETSAMPLEIDS % (l, beginDate, endDate, spectralQualif, nbOfElem))

        sampleIDs = []

        rows = result.fetchall()

        for row in rows:
            sampleIDs.append(row[0])

        self._log.info("Generate products for %d sampleIDs" % (len(sampleIDs)))

        self.log_in_file("list of sampleIDs to fetch: %s" % (sampleIDs))

        return sampleIDs

    def _get_stations_ids(self, a_station_codes):
        """Translate station codes into station ids via
        SQL_GETALLSTATIONIDSFROMCODES; raises when no id is found."""

        result = self._ngMainConn.execute(SQL_GETALLSTATIONIDSFROMCODES % (",".join(a_station_codes)))

        sta_ids = []

        rows = result.fetchall()

        for row in rows:
            sta_ids.append(row[0])

        # Error message is no ids found for the stations
        if len(sta_ids) == 0:
            raise Exception(
                "Cannot find any sample ids for the stations %s. Are you sure they are valid station codes ?"
                % (a_station_codes)
            )

        return sta_ids

    def _get_all_stations(self, a_stations_types):
        """Return the ids of all stations of the given types
        ('SAUNA' / 'SPALAX').

        NOTE(review): an unknown type leaves ``result`` unset (NameError on
        the first iteration) or reuses the previous iteration's result.
        """

        sta_ids = []

        for type in a_stations_types:

            if type == "SAUNA":
                result = self._ngMainConn.execute(SQL_GETALLSAUNASTATIONCODES)
            elif type == "SPALAX":
                result = self._ngMainConn.execute(SQL_GETALLSPALAXSTATIONCODES)

            sta_codes = []

            rows = result.fetchall()

            # each row is (station code, station id)
            for row in rows:
                sta_codes.append(row[0])
                sta_ids.append(row[1])

            self._log.info("Found %d %s stations." % (len(sta_codes), type))
            self.log_in_file("Found the following %s stations: %s." % (type, sta_codes))

        return sta_ids

    def _create_results_directories(self, dir):
        """Create the samples/ and ARR/ result directories under *dir*."""

        # TODO need to fix that as there are some issues with the permissions checking
        # if os.path.exists(dir) and not os.access('%s/samples'%(dir),os.R_OK | os.W_OK |os.X_OK):
        #    raise Exception("Do not have the right permissions to write in result's directory %s.Please choose another result's SAMPML directory."%(dir))

        # if os.path.exists('%s/samples'%(dir)) and not os.access('%s/samples'%(dir),os.R_OK | os.W_OK):
        #    raise Exception("Do not have the right permissions to write in result's SAMPML directory %s.Please choose another result's SAMPML directory."%('%s/samples'%(dir)))

        # if os.path.exists('%s/ARR'%(dir)) and not os.access('%s/ARR'%(dir),os.R_OK | os.W_OK):
        #    raise Exception("Do not have the right permissions to write in result's SAMPML directory %s.Please choose another result's SAMPML directory."%('%s/ARR'%(dir)))

        # try to make the dir if necessary
        ctbto.common.utils.makedirs("%s/samples" % (dir))

        # try to make the dir if necessary
        ctbto.common.utils.makedirs("%s/ARR" % (dir))

    def _clean_cache(self):
        """ clean the cache directory """

        path = self._conf.get("Caching", "dir", None)

        self._log.info("Clean the cached data under %s" % (path))

        if path is not None:
            ctbto.common.utils.delete_all_under(path)

    def _clean_cached_spectrum(self):
        """ clean the cached spectrum """
        path = self._conf.get("RemoteAccess", "localdir", None)

        self._log.info("Clean the cached spectra under %s" % (path))

        if path is not None:
            ctbto.common.utils.delete_all_under(path)

    def execute(self, a_args):
        """Main entry point: validate the args, optionally clean the caches,
        resolve the list of sample ids (explicit 'sids' or derived from
        dates/stations), then generate a SAMPML file and an ARR for each
        sample.

        Raises:
           Exception when no args are given, or when neither sample ids,
           dates, nor stations are provided and no cache was cleaned.
        """

        if a_args == None or a_args == {}:
            raise Exception("No commands passed. See usage message.")

        self._log.info("*************************************************************")
        self._log.info("Configuration infos read from %s" % (self._conf.get_conf_file_path()))

        self._log.info("*************************************************************\n")

        cache_cleaned = False
        local_spectra_cleaned = False

        # check if we need to clean the cache
        if a_args["clean_cache"]:
            self._clean_cache()
            cache_cleaned = True

        if a_args["clean_local_spectra"]:
            self._clean_cached_spectrum()
            local_spectra_cleaned = True

        # check if we can write in case the dir already exists
        dir = a_args["dir"]
        self._create_results_directories(dir)

        # default request => do not retrieve PREL but all the rest
        request = "spectrum=CURR/DETBK/GASBK/QC, analysis=CURR"

        # check if we have some sids or we get it from some dates
        self._log.info("*************************************************************")

        if "sids" in a_args:
            sids = a_args["sids"]
        elif "from" in a_args or "end" in a_args or "stations" in a_args:
            # default window: yesterday .. today (Oracle date format)
            begin = a_args.get(
                "from", ctbto.common.time_utils.getOracleDateFromISO8601(ctbto.common.time_utils.getYesterday())
            )
            end = a_args.get(
                "end", ctbto.common.time_utils.getOracleDateFromISO8601(ctbto.common.time_utils.getToday())
            )
            stations = a_args.get("stations", None)
            if stations != None:
                stations = self._get_stations_ids(stations)
            else:
                stations = self._get_all_stations(a_args["station_types"])
            sids = self._get_list_of_sampleIDs(stations, begin, end)
        else:
            # if the cache has been clean then exit quietly as one action has been performed
            if cache_cleaned or local_spectra_cleaned:
                return
            else:
                # no actions performed error
                raise Exception("need either a sid or some dates or a station name")

        self._log.info("Start the product generation")
        self._log.info("*************************************************************\n")

        to_ignore = self._conf.getlist("IgnoreSamples", "noblegazSamples")
        always_recreate_files = a_args["always_recreate_files"]

        for sid in sids:

            if sid in to_ignore:
                self._log.info("*************************************************************")
                self._log.info("Ignore the retrieval of the sample id %s as it is incomplete." % (sid))
                self._log.info("*************************************************************\n")
                # skip this iteration
                continue

            self._log.info("*************************************************************")
            self._log.info("Fetch data and build SAMPML data file for %s" % (sid))

            # if the right flag is set and the file already exists do not recreate it
            if always_recreate_files or not os.path.exists("%s/ARR/ARR-%s.html" % (dir, sid)):

                # fetch noble gaz or particulate
                fetcher = DBDataFetcher.getDataFetcher(self._ngMainConn, self._ngArchConn, sid)

                # prevent from trying to generate particulate samples
                if isinstance(fetcher, ctbto.db.datafetchers.ParticulateDataFetcher):
                    raise Exception("Generation for Particulate ARR files not supported")

                # modify remoteHost
                fetcher.setRemoteHost(self._conf.get("RemoteAccess", "nobleGazRemoteHost", "dls007"))

                fetcher.fetch(request, "GAS")

                renderer = BaseRenderer.getRenderer(fetcher)

                xmlStr = renderer.asXmlStr(request)

                station_code = fetcher.get("STATION_CODE")

                path = "%s/samples/sampml-full-%s-%s.xml" % (dir, station_code, sid)

                self._log.info("Save SAMPML data in %s" % (path))

                # pretty print and save in file
                ctbto.common.xml_utils.pretty_print_xml(StringIO.StringIO(xmlStr), path)

                # create ARR if possible
                self._create_arr(fetcher, dir, path, sid, station_code)

            else:
                self._log.info("products are already existing in %s for %s" % (dir, sid))

            self._log.info("*************************************************************\n")

    def _create_arr(self, a_fetcher, a_dir, a_path, a_sid, a_station_code):
        """ create the ARR if possible.
            ARRs will be created only for SPHDF and SPHDP
        """
        current = a_fetcher.get("CURRENT_CURR")

        if current != None:
            # e.g. the measurement type is the part before the first '_'
            splitted = current.split("_")

            measurement_type = splitted[0]

            # generate arr only for SPHD (FULL and PREL) normally
            if measurement_type in ("SPHD", "PREL"):
                if a_fetcher.get("SAMPLE_TYPE") == "SAUNA":
                    self._log.info("Create ARR from SAUNA SAMPML data file for %s" % (a_sid))

                    ren = SAUNAXML2HTMLRenderer(self._conf.get("Transformer", "templateDir"))

                    result = ren.render(a_path)

                    path = "%s/ARR/ARR-%s-%s.html" % (a_dir, a_station_code, a_sid)

                    self._log.info("save file in %s" % (path))

                    ctbto.common.utils.printInFile(result, path)

                elif a_fetcher.get("SAMPLE_TYPE") == "SPALAX":
                    self._log.info("Create ARR from SPALAX SAMPML data file for %s" % (a_sid))

                    ren = SPALAXXML2HTMLRenderer(self._conf.get("Transformer", "templateDir"))

                    result = ren.render(a_path)

                    path = "%s/ARR/ARR-%s-%s.html" % (a_dir, a_station_code, a_sid)

                    self._log.info("save file in %s" % (path))

                    ctbto.common.utils.printInFile(result, path)
            else:
                self._log.info("Cannot create a ARR for a sample with a type %s" % (measurement_type))
        else:
            self._log.error("Sample %s hasn't got any CURRENT_CURR in its fetcher" % (a_sid))
Exemplo n.º 7
0
class TestSAMPMLCreator(unittest.TestCase):
    
    # Class members
    c_log = logging.getLogger("main_tests.TestSAMPMLCreator")
    c_log.setLevel(logging.INFO)
    
    def _get_tests_dir_path(self):
        """ Return the conf_tests directory shipped with the ctbto.tests
            package, wherever that package happens to be installed.
        """
        fmod_path = ctbto.tests.__path__
        return "%s/conf_tests" % fmod_path[0]
    
    def __init__(self,stuff):
        """ Build the test case and prepare the configuration environment.

            :param stuff: the test method name, forwarded to unittest.TestCase.
        """
        super(TestSAMPMLCreator,self).__init__(stuff)
        
        myBasicLoggingConfig()
        
        # point the Conf machinery at the test resources.
        # NOTE: these env vars must be set BEFORE Conf.get_instance() is called
        os.environ['RNPICKER_CONF_DIR'] = self._get_tests_dir_path()
        
        os.environ[Conf.ENVNAME] = '%s/%s'%(self._get_tests_dir_path(),'rnpicker.config')
        
        # create an empty shell Conf object
        self.conf = Conf.get_instance()
    
        # particulate (main) DB settings -- filled in by _setUpGenieParticulate
        self.mainDatabase = None
        self.mainUser = None
        self.mainPassword = None
        self.mainConn = None
        self.mainActivateTimer = False
        
        # particulate archive DB settings -- filled in by _setUpGenieParticulate
        self.ParticulateArchiveDatabaseAccess = None
        self.archiveUser = None
        self.archivePassword = None
        self.archiveActivateTimer = False
        self.archConn = None
        
        # compiled XPath expressions -- built in _setUpGenieParticulate
        self.xpath_calIDs = None
        self.xpath_specalIDs = None
        
        # noble gas DB settings -- filled in by _setUpNobleGaz
        self.nbDatabase = None
        self.nbUser = None
        self.nbPassword = None
        self.nbActivateTimer = False
        self.nbConn = None
        
        TestSAMPMLCreator.c_log.info("\n********************************************************************************\n rnpicker modules are loaded from %s\n********************************************************************************\n"%(self._get_tests_dir_path()))
    
    def _setUpGenieParticulate(self):
        """ Connect to the particulate main and archive databases and
            compile the XPath expressions used to check the final product.
        """
        conf = self.conf

        # main particulate database
        self.mainDatabase      = conf.get("ParticulateDatabaseAccess", "hostname")
        self.mainUser          = conf.get("ParticulateDatabaseAccess", "user")
        self.mainPassword      = conf.get("ParticulateDatabaseAccess", "password")
        self.mainActivateTimer = conf.getboolean("ParticulateDatabaseAccess", "activateTimer", True)

        TestSAMPMLCreator.c_log.info("")
        TestSAMPMLCreator.c_log.info("Particulate Database=%s" % (self.mainDatabase))

        # create DB connector and open the connection
        self.mainConn = DatabaseConnector(self.mainDatabase, self.mainUser, self.mainPassword, self.mainActivateTimer)
        self.mainConn.connect()

        # particulate archive database
        self.ParticulateArchiveDatabaseAccess = conf.get("ParticulateArchiveDatabaseAccess", "hostname")
        self.archiveUser          = conf.get("ParticulateArchiveDatabaseAccess", "user")
        self.archivePassword      = conf.get("ParticulateArchiveDatabaseAccess", "password")
        self.archiveActivateTimer = conf.getboolean("ParticulateArchiveDatabaseAccess", "activateTimer", True)

        TestSAMPMLCreator.c_log.info("Archive Database=%s" % (self.ParticulateArchiveDatabaseAccess))

        # create DB connector and open the connection
        self.archConn = DatabaseConnector(self.ParticulateArchiveDatabaseAccess, self.archiveUser, self.archivePassword, self.archiveActivateTimer)
        self.archConn.connect()

        # compiled XPath expressions used to check the final product
        self.xpath_calIDs = etree.XPath("//*[local-name(.)='CalibrationInformation']/*[local-name(.)='Calibration']/@ID")
        self.xpath_specalIDs = etree.XPath("//*[local-name(.)='MeasuredInformation']/*[local-name(.)='Spectrum']/@calibrationIDs")
    
    def _setUpNobleGaz(self):
        """ Read the noble gas database settings and open the connection. """
        conf = self.conf

        self.nbDatabase      = conf.get("NobleGazDatabaseAccess", "hostname")
        self.nbUser          = conf.get("NobleGazDatabaseAccess", "user")
        self.nbPassword      = conf.get("NobleGazDatabaseAccess", "password")
        self.nbActivateTimer = conf.getboolean("NobleGazDatabaseAccess", "activateTimer", True)

        TestSAMPMLCreator.c_log.info("Noble Gaz Database=%s" % (self.nbDatabase))

        # create DB connector and open the connection
        self.nbConn = DatabaseConnector(self.nbDatabase, self.nbUser, self.nbPassword, self.nbActivateTimer)
        self.nbConn.connect()
    
    
    def setUp(self):
        """ unittest hook: open every database connection before each test. """
        # particulate main + archive connections, plus compiled XPaths
        self._setUpGenieParticulate()
        # noble gas connection
        self._setUpNobleGaz()
        
        
    def assertIfNoTagsLeft(self,path):
        """
Check that no tags are left in the XML
"""
        
        # pattern for looking for tags
        pattern="\${\w*}"
        
        # first read file in memory as the file is small
        f = open(path,"r")
        
        strToCheck = f.read()
        
        f.close()
        
        res = re.findall(pattern, strToCheck)
         
        self.failUnless((len(res) == 0), "Error. the file %s contains the following tags=%s"%(path,res))
        
    def assertAllCalibrationInfo(self, path):
        """ Assert that every calibration ID referenced by a <Spectrum>
            element is defined in a <Calibration> element of the file.

            :param path: path of the SAMPML file to check.
            :raises AssertionError: if a referenced calibration ID is undefined.
        """
        # pass the path directly: etree.parse accepts filenames, and the
        # previous open(path,"r") leaked the file handle
        tree = etree.parse(path)

        calibrationIDs = self.xpath_calIDs(tree)
        specCalIDs = self.xpath_specalIDs(tree)

        TestSAMPMLCreator.c_log.debug("spec cal = %s\n" % (specCalIDs))
        TestSAMPMLCreator.c_log.debug("calibrationIDs =%s\n" % (calibrationIDs))

        for cals in specCalIDs:
            # a Spectrum may reference several calibrations in one
            # space-separated attribute value
            for elem in cals.split(' '):
                # assertTrue replaces the deprecated failUnless alias
                self.assertTrue(elem in calibrationIDs, "Error the following calibration info %s is not defined in the <Calibration> Tag. Xml file produced %s\n" % (elem, path))
        
   
    def assertFileContentEquals(self,a_master_path,a_tocheck_path):
        """
check at the string level that the two files are identical otherwise fail
"""
        TestSAMPMLCreator.c_log.info("Start bit checking")
        
        linenum = 1
        master = open(a_master_path,'r')
        tocheck = open(a_tocheck_path,'r')
        
        for m_line in master:
            c_line = tocheck.readline()
            
            if m_line != c_line:
                self.fail("line num %d is different on the master %s and on the file to check %s.\n master line:[%s]\n tcheck line:[%s]"%(linenum,a_master_path,a_tocheck_path,m_line,c_line))
          
        TestSAMPMLCreator.c_log.info("End of bit checking")
        
    def getListOfSampleIDs(self, beginDate='2008-07-01', endDate='2008-07-31', spectralQualif='FULL', nbOfElem='100'):
        """ Return the particulate sample IDs found in the main database
            for the given period / spectral qualifier (first column of
            each returned row).
        """
        result = self.mainConn.execute(SQL_GETSAMPLEIDS % (beginDate, endDate, spectralQualif, nbOfElem))

        # keep only the first column of every row
        sampleIDs = [row[0] for row in result.fetchall()]

        TestSAMPMLCreator.c_log.info("samples %s\n" % (sampleIDs))

        return sampleIDs
   
    def getListOfSaunaSampleIDs(self, beginDate='2008-07-01', endDate='2008-07-31', spectralQualif='FULL', nbOfElem='100'):
        """ Return the SAUNA sample IDs found in the noble gas database
            for the given period / spectral qualifier.
        """
        result = self.nbConn.execute(SQL_GETSAUNASAMPLEIDS % (beginDate, endDate, spectralQualif, nbOfElem))

        # keep only the first column of every row
        sampleIDs = [row[0] for row in result.fetchall()]

        TestSAMPMLCreator.c_log.info("sauna samples %s\n" % (sampleIDs))

        return sampleIDs
    
    def getListOfSpalaxSampleIDs(self, beginDate='2008-07-01', endDate='2008-07-31', spectralQualif='FULL', nbOfElem='100'):
        """ Return the SPALAX sample IDs found in the noble gas database
            for the given period / spectral qualifier.
        """
        result = self.nbConn.execute(SQL_GETSPALAXSAMPLEIDS % (beginDate, endDate, spectralQualif, nbOfElem))

        # keep only the first column of every row
        sampleIDs = [row[0] for row in result.fetchall()]

        TestSAMPMLCreator.c_log.info("spalax samples %s\n" % (sampleIDs))

        return sampleIDs
    
          
    def ztestGetOneParticulateSampleAndDoBitChecking(self):
        """ Render one known particulate sample (967273) to SAMPML and compare
            the result byte-for-byte against a registered master file.
            (disabled: the leading z keeps unittest from collecting it)
        """
        
        request="spectrum=CURR, analysis=CURR"
        cpt = 0
        total_t0 = time.time()
        
        #listOfSamplesToTest = self.getListOfSampleIDs('2008-12-24',endDate='2008-12-25',spectralQualif='FULL',nbOfElem='1')
        
        #sampleID = listOfSamplesToTest[0]
        # hard-coded sample for which a .master reference file exists
        sampleID = 967273
        
        # fetch the particulate sample from the main and archive databases
        fetcher = DBDataFetcher.getDataFetcher(self.mainConn,self.archConn,sampleID)
            
        fetcher.fetch(request,'PAR')
                 
        renderer = GenieParticulateRenderer(fetcher)
   
        xmlStr = renderer.asXmlStr(request)
           
        path = "/tmp/samples/sampml-full-%s.xml"%(sampleID)
   
        ctbto.common.xml_utils.pretty_print_xml(StringIO.StringIO(xmlStr),path)
           
        # check that no ${...} template tags are left unexpanded
        self.assertIfNoTagsLeft(path)
           
        self.assertAllCalibrationInfo(path)
        
        # byte-level comparison against the registered master file
        self.assertFileContentEquals("%s/samples/sampml-full-%s.xml.master"%(self._get_tests_dir_path(),sampleID),path)
                           
        cpt +=1
        
        total_t1 = time.time()
        
        TestSAMPMLCreator.c_log.info("\n****************************************************************************\n****************************************************************************\n****** EXECUTED %d FULL SAMPLE RETRIEVALS in %s seconds ********\n****************************************************************************\n****************************************************************************\n"%(cpt,total_t1-total_t0))
 
 
 
    def ztestFullGenieParticulateSamples(self):
        """ Render a batch of Genie particulate samples to SAMPML and
            validate each produced file.
            (disabled: the leading z keeps unittest from collecting it)
        """
         
        request="spectrum=CURR/QC/BK, analysis=CURR"
        
        # get full 2003-10-24 to 2003-10-26
        listOfSamplesToTest = self.getListOfSampleIDs('2007-01-04',endDate='2007-01-15',spectralQualif='FULL',nbOfElem='5')
        
        # error
        # these two are working
        #listOfSamplesToTest = [ "916900","917873" ]
               
        #transform in numbers and retransform in str to remove the 0 at the beginning of the number"
        #intifiedlist = map(int,listOfSamplesToTest)
        
        #listOfSamplesToTest = map(str,intifiedlist)
        
        TestSAMPMLCreator.c_log.info("list samples: %s"%listOfSamplesToTest)
        
        cpt = 0
        total_t0 = time.time()
        
        for sampleID in listOfSamplesToTest:
            
            TestSAMPMLCreator.c_log.info("\n********************************************************************************\n Start Test %d for SampleID %s.\n********************************************************************************\n"%(cpt,sampleID))
           
            t0 = time.time()
           
            # fetch the particulate sample from the main and archive databases
            fetcher = DBDataFetcher.getDataFetcher(self.mainConn,self.archConn,sampleID)
            
            fetcher.fetch(request,'PAR')
                 
            renderer = GenieParticulateRenderer(fetcher)
   
            xmlStr = renderer.asXmlStr(request)
           
            path = "/tmp/samples/sampml-full-%s.xml"%(sampleID)
   
            ctbto.common.xml_utils.pretty_print_xml(StringIO.StringIO(xmlStr),path)
           
            # check that no ${...} template tags are left unexpanded
            self.assertIfNoTagsLeft(path)
           
            self.assertAllCalibrationInfo(path)
           
            t1 = time.time()
           
            TestSAMPMLCreator.c_log.info("\n********************************************************************************\n End of Test %d for SampleID %s. Test executed in %s seconds.\n********************************************************************************\n"%(cpt,sampleID,(t1-t0)))
                       
            cpt +=1
        
        total_t1 = time.time()
        
        TestSAMPMLCreator.c_log.info("\n****************************************************************************\n****************************************************************************\n****** EXECUTED %d FULL SAMPLE RETRIEVALS in %s seconds ********\n****************************************************************************\n****************************************************************************\n"%(cpt,total_t1-total_t0))
 
    def ztestFullNobleGazSamples(self):
        """ Render a batch of SAUNA noble gas samples to SAMPML and check
            that no template tags are left in the produced files.
            (disabled: the leading z keeps unittest from collecting it)
        """
         
        request="spectrum=CURR/DETBK/GASBK/QC, analysis=CURR"
        
        # get full
        listOfSamplesToTest = self.getListOfSaunaSampleIDs('2008-08-11',endDate='2008-12-12',spectralQualif='FULL',nbOfElem='2')
               
        # remove sampleID for which data isn't available
        if "141372" in listOfSamplesToTest:
            listOfSamplesToTest.remove("141372")
               
        TestSAMPMLCreator.c_log.info("list samples :%s"%(listOfSamplesToTest))
        
        cpt = 0
        total_t0 = time.time()
        
        for sampleID in listOfSamplesToTest:
            
            TestSAMPMLCreator.c_log.info("Start Test %d for SampleID %s.\n"%(cpt,sampleID))
           
            t0 = time.time()
           
            # fetch the noble gas sample from the noble gas and archive databases
            fetcher = DBDataFetcher.getDataFetcher(self.nbConn,self.archConn,sampleID)
            
            #modify remoteHost
            fetcher.setRemoteHost(self.conf.get('RemoteAccess','nobleGazRemoteHost','dls007'))
   
            fetcher.fetch(request,'GAS')
                 
            renderer = SaunaRenderer(fetcher)
   
            xmlStr = renderer.asXmlStr(request)
           
           #print "Non Formatted String [%s]\n"%(xmlStr)
           
           #f = open("/tmp/xmlStr.xml","w")
           
           #f.write(xmlStr)
           #f.close()
   
            path = "/tmp/samples/sampml-full-%s.xml"%(sampleID)
   
            ctbto.common.xml_utils.pretty_print_xml(StringIO.StringIO(xmlStr),path)
           
            # check that no ${...} template tags are left unexpanded
            self.assertIfNoTagsLeft(path)
           
           #self.assertAllCalibrationInfo(path)
           
            t1 = time.time()
           
            #TestSAMPMLCreator.c_log.info("End of Test %d for SampleID %s.\nTest executed in %s seconds.\n\n**************************************************************** \n**************************************************************** \n"%(cpt,sampleID,(t1-t0)))
            TestSAMPMLCreator.c_log.info("\n********************************************************************************\n End of Test %d for SampleID %s. Test executed in %s seconds.\n********************************************************************************\n"%(cpt,sampleID,(t1-t0)))
           
            cpt +=1
        
        total_t1 = time.time()
        
        TestSAMPMLCreator.c_log.info("\n****************************************************************************\n****************************************************************************\n****** EXECUTED %d FULL SAMPLE RETRIEVALS in %s seconds ********\n****************************************************************************\n****************************************************************************\n"%(cpt,total_t1-total_t0))
    
    def ztestSpalaxFullNobleGazSamples(self):
        """ Render SPALAX noble gas samples to SAMPML and check that no
            template tags are left in the produced files.
            (disabled: the leading z keeps unittest from collecting it)
        """
         
        request="spectrum=ALL, analysis=CURR"
        
        # get full
        listOfSamplesToTest = self.getListOfSpalaxSampleIDs('2009-01-20',endDate='2009-02-01',spectralQualif='FULL',nbOfElem='2')
               
        # remove sampleID for which data isn't available
        #if "141372" in listOfSamplesToTest:
        # listOfSamplesToTest.remove("141372")
        #PREL 211385
        # NOTE(review): the fetched list is overridden by one hard-coded
        # sample -- presumably a debugging leftover
        listOfSamplesToTest = ['269892']
        TestSAMPMLCreator.c_log.info("list samples :%s"%(listOfSamplesToTest))
        
        cpt = 0
        total_t0 = time.time()
        
        for sampleID in listOfSamplesToTest:
            
            TestSAMPMLCreator.c_log.info("Start Test %d for SampleID %s.\n"%(cpt,sampleID))
           
            t0 = time.time()
           
            # fetch the noble gas sample from the noble gas and archive databases
            fetcher = DBDataFetcher.getDataFetcher(self.nbConn,self.archConn,sampleID)
            
            #modify remoteHost
            fetcher.setRemoteHost(self.conf.get('RemoteAccess','nobleGazRemoteHost','dls007'))
   
            fetcher.fetch(request,'GAS')
                 
            renderer = SpalaxRenderer(fetcher)
   
            xmlStr = renderer.asXmlStr(request)
           
           #print "Non Formatted String [%s]\n"%(xmlStr)
           
           #f = open("/tmp/xmlStr.xml","w")
           
           #f.write(xmlStr)
           #f.close()
   
            path = "/tmp/samples/sampml-full-%s.xml"%(sampleID)
   
            ctbto.common.xml_utils.pretty_print_xml(StringIO.StringIO(xmlStr),path)
           
            # check that no ${...} template tags are left unexpanded
            self.assertIfNoTagsLeft(path)
           
           #self.assertAllCalibrationInfo(path)
           
            t1 = time.time()
           
            #TestSAMPMLCreator.c_log.info("End of Test %d for SampleID %s.\nTest executed in %s seconds.\n\n**************************************************************** \n**************************************************************** \n"%(cpt,sampleID,(t1-t0)))
            TestSAMPMLCreator.c_log.info("\n********************************************************************************\n End of Test %d for SampleID %s. Test executed in %s seconds.\n********************************************************************************\n"%(cpt,sampleID,(t1-t0)))
           
            cpt +=1
        
        total_t1 = time.time()
        
        TestSAMPMLCreator.c_log.info("\n****************************************************************************\n****************************************************************************\n****** EXECUTED %d FULL SAMPLE RETRIEVALS in %s seconds ********\n****************************************************************************\n****************************************************************************\n"%(cpt,total_t1-total_t0))
    
    def ztestIterativeSampleIDRetrievals(self):
        """ Fetch the sample IDs for a fixed day and persist, in a pickle
            file, the sets of IDs seen across runs: on each run only the
            IDs not present in the previously stored set are appended.
            (disabled: the leading z keeps unittest from collecting it)
        """
        beginDate = '2009-01-20T00:00:00'

        endDate   = '2009-01-21T00:00:00'

        d1 = ctbto.common.time_utils.getOracleDateFromISO8601(beginDate)
        d2 = ctbto.common.time_utils.getOracleDateFromISO8601(endDate)

        new_list = self.getListOfSampleIDs(d1, d2, spectralQualif='FULL', nbOfElem='1000000')
        # filename contains email+day
        filename = "/tmp/[email protected]"

        list_of_data = []

        if os.path.exists(filename):
            # BUG FIX: pickle streams are binary -- open in 'rb' (text mode
            # corrupts the stream on some platforms and fails on Python 3)
            f = open(filename, "rb")
            try:
                list_of_data = pickle.load(f)
            finally:
                f.close()

        nb_of_iter = len(list_of_data)

        if nb_of_iter > 0:
            # compare the freshly fetched IDs with the last stored set and
            # record only the newly seen ones
            previous_set = list_of_data[nb_of_iter - 1]
            new_set = set(new_list)

            diff_set = new_set.difference(previous_set)

            if len(diff_set) > 0:
                TestSAMPMLCreator.c_log.info("list of new samples %s" % diff_set)
                list_of_data.append(diff_set)

        else:
            # first run: store the whole set
            list_of_data.append(set(new_list))

        # BUG FIX: write the pickle in binary mode as well
        f = open(filename, 'wb')
        try:
            pickle.dump(list_of_data, f)
        finally:
            f.close()
       
    
    def testGenerateNobleGasARR(self):
        """ Render SAUNA noble gas samples to SAMPML, validate the output
            and generate the corresponding ARR HTML reports under /tmp.
        """
        
        request="spectrum=CURR/DETBK/GASBK/QC, analysis=CURR"
        
        # get full
        listOfSamplesToTest = self.getListOfSaunaSampleIDs('2007-11-25',endDate='2008-11-26',spectralQualif='FULL',nbOfElem='5')
              
        # remove sampleID for which data isn't available
        # 206975: No Calibration Available
        toRemove = [141372,206975]
        
        for id in toRemove:
            if id in listOfSamplesToTest:
                listOfSamplesToTest.remove(id)
                
        # NOTE(review): the list built above is overridden by one hard-coded
        # sample -- presumably a debugging leftover
        listOfSamplesToTest = [310317]
               
        TestSAMPMLCreator.c_log.info("list samples %s"%listOfSamplesToTest)
        
        cpt = 1
        total_t0 = time.time()
        
        
        for sampleID in listOfSamplesToTest:
           
            TestSAMPMLCreator.c_log.info("Start Test %d for SampleID %s.\n"%(cpt,sampleID))
           
            t0 = time.time()
           
            # fetch noble gaz or particulate
            fetcher = DBDataFetcher.getDataFetcher(self.nbConn,self.archConn,sampleID)
   
            #modify remoteHost
            fetcher.setRemoteHost(self.conf.get('RemoteAccess','nobleGazRemoteHost','dls007'))
            
            fetcher.fetch(request,'GAS')
                 
            renderer = SaunaRenderer(fetcher)
   
            xmlStr = renderer.asXmlStr(request)
           
            path = "/tmp/samples/sampml-full-%s.xml"%(sampleID)
   
            ctbto.common.xml_utils.pretty_print_xml(StringIO.StringIO(xmlStr),path)
           
            # check that no ${...} template tags are left unexpanded
            self.assertIfNoTagsLeft(path)
           
            self.assertAllCalibrationInfo(path)
           
            t1 = time.time()
           
            TestSAMPMLCreator.c_log.info("Fetch sample nb %d with SampleID %s.\nTest executed in %s seconds.\n\n**************************************************************** \n**************************************************************** \n"%(cpt,sampleID,(t1-t0)))
           
            cpt +=1
        
            # render the ARR HTML report from the generated SAMPML file
            r = XML2HTMLRenderer('%s/%s'%(self._get_tests_dir_path(),'templates'),'ArrHtml.html')
    
            result = r.render(path)
    
            utils.printInFile(result,"/tmp/ARR-%s.html"%(sampleID))
           
        total_t1 = time.time()
        
        TestSAMPMLCreator.c_log.info("\n****************************************************************************\n****************************************************************************\n****** EXECUTED %d FULL SAMPLE RETRIEVALS in %s seconds ********\n****************************************************************************\n****************************************************************************\n"%(cpt,total_t1-total_t0))