Example #1
0
    os.remove(harvester_config.db.database_filename)
except Exception:
    pass

# Mirror output of every configured panda.log.* logger to stdout, reusing the
# logger's own formatter; db_proxy is skipped on purpose (too verbose).
# NOTE: loggerDict may also hold logging.PlaceHolder entries (created when a
# child logger is obtained before its parent); those have no .handlers and
# would crash the original code, so only real Logger objects are processed.
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
    if not loggerName.startswith('panda.log'):
        continue
    if not isinstance(loggerObj, logging.Logger):
        continue
    if len(loggerObj.handlers) == 0:
        continue
    if loggerName.split('.')[-1] in ['db_proxy']:
        continue
    stdoutHandler = logging.StreamHandler(sys.stdout)
    stdoutHandler.setFormatter(loggerObj.handlers[0].formatter)
    loggerObj.addHandler(stdoutHandler)

# Build the queue configuration map and (re)create the harvester DB tables.
queueConfigMapper = QueueConfigMapper()

proxy = DBProxy()
proxy.make_tables(queueConfigMapper)

# Insert a minimal dummy job (PandaID=1) so later calls have data to work on.
job = JobSpec()
job.PandaID = 1

job.modificationTime = datetime.datetime.now()
proxy.insert_jobs([job])

# Read the job back to verify the insert round-trips through the DB.
newJob = proxy.get_job(1)

# Exercise the communicator with placeholder site/node/CE values,
# requesting a single job with empty extra parameters.
a = CommunicatorPool()
a.get_jobs('siteName', 'nodeName', 'prodSourceLabel', 'computingElement', 1,
           {})
    def trigger_stage_out(self, jobspec):
        """Trigger the stage-out procedure for the job.

        Output files are taken from jobspec.get_output_file_specs(skip_done=True),
        i.e. only files not yet transferred. Each file is copied with
        shutil.copyfile to a destination directory derived from the queue
        configuration (seprodpath) and, for non-log files, from the job
        parameters (software path, production name, slot, output file names).
        Each FileSpec.status is set to 'finished' or 'failed' and force-updated.

        :param jobspec: job specifications
        :type jobspec: JobSpec
        :return: A tuple of return code (True: success, False: fatal failure,
                 None: temporary failure) and error dialog
        :rtype: (bool, string)
        """
        # make logger (method_name must match this method, not the check method)
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='trigger_stage_out')
        tmpLog.debug('start')
        allChecked = True
        ErrMsg = 'These files failed to upload: '

        tmpLog.debug('Getting seprodpath from queue_config')
        queue_config = self.queue_config_mapper.get_queue(self.queueName)

        # the full job spec is needed because jobParams is not filled in jobspec
        tmpLog.debug('Requesting full spec of the job {0}'.format(jobspec.PandaID))
        proxy = DBProxy()
        jobSpec_full = proxy.get_job(jobspec.PandaID)

        for fileSpec in jobspec.get_output_file_specs(skip_done=True):
            # default destination: log tarballs go straight to seprodpath
            destination = queue_config.seprodpath
            filename = fileSpec.lfn

            if '.log.tgz' not in fileSpec.lfn:
                tmpLog.debug('Getting sw path, name and hist filename from jobPars')
                (sw_prefix, sw_path, prod_name, prodSlt, TMPMDSTFILE, TMPHISTFILE,
                 EVTDUMPFILE, MERGEDMDSTFILE, MERGEDHISTFILE, MERGEDDUMPFILE,
                 PRODSOFT, MCGENFILEOUT) = \
                    self.getSWPathAndNameAndFilename(jobSpec_full.jobParams['jobPars'])

                tmpLog.debug('sw_prefix: {0}'.format(sw_prefix))
                tmpLog.debug('sw_path: {0}'.format(sw_path))
                tmpLog.debug('prod_name: {0}'.format(prod_name))
                tmpLog.debug('prodSlt: {0}'.format(prodSlt))
                tmpLog.debug('TMPMDSTFILE: {0}'.format(TMPMDSTFILE))
                tmpLog.debug('TMPHISTFILE: {0}'.format(TMPHISTFILE))
                tmpLog.debug('EVTDUMPFILE: {0}'.format(EVTDUMPFILE))
                tmpLog.debug('MERGEDMDSTFILE: {0}'.format(MERGEDMDSTFILE))
                tmpLog.debug('MERGEDHISTFILE: {0}'.format(MERGEDHISTFILE))
                tmpLog.debug('MERGEDDUMPFILE: {0}'.format(MERGEDDUMPFILE))
                tmpLog.debug('PRODSOFT: {0}'.format(PRODSOFT))
                tmpLog.debug('MCGENFILEOUT: {0}'.format(MCGENFILEOUT))

                # map the known production / merge / mc-generation output files
                # to their archive sub-directories; an unknown non-log lfn falls
                # through with an empty se_path (behavior kept from the original)
                se_path = ''
                # prod
                if fileSpec.lfn == TMPMDSTFILE:
                    se_path = sw_prefix + sw_path + PRODSOFT + '/mDST.chunks'
                if fileSpec.lfn == TMPHISTFILE:
                    se_path = sw_prefix + sw_path + PRODSOFT + '/TRAFDIC'
                if fileSpec.lfn == "testevtdump.raw":
                    se_path = sw_prefix + sw_path + PRODSOFT + '/evtdump/slot' + prodSlt
                    filename = EVTDUMPFILE
                if fileSpec.lfn == "payload_stdout.out.gz":
                    se_path = sw_prefix + sw_path + PRODSOFT + '/logFiles'
                    filename = prod_name + '.' + TMPHISTFILE.replace('.root', '.stdout.gz')
                if fileSpec.lfn == "payload_stderr.out.gz":
                    se_path = sw_prefix + sw_path + PRODSOFT + '/logFiles'
                    filename = prod_name + '.' + TMPHISTFILE.replace('.root', '.stderr.gz')

                # merge
                if fileSpec.lfn == MERGEDMDSTFILE:
                    se_path = sw_prefix + sw_path + PRODSOFT + '/mDST'
                if fileSpec.lfn == MERGEDHISTFILE:
                    se_path = sw_prefix + sw_path + PRODSOFT + '/histos'
                if fileSpec.lfn == MERGEDDUMPFILE:
                    se_path = sw_prefix + sw_path + PRODSOFT + '/mergedDump/slot' + prodSlt

                # mc generation
                if fileSpec.lfn == MCGENFILEOUT:
                    se_path = sw_prefix + '/mc/' + sw_path + PRODSOFT + '/mcgen'
                    filename = MCGENFILEOUT

                destination = se_path

            surl = "{0}/{1}".format(destination, filename)
            dst_gpfn = "{0}/{1}".format(destination, filename)
            lfcdir = destination

            tmpLog.debug('fileSpec.path = {0}'.format(fileSpec.path))
            tmpLog.debug('SURL = {0}'.format(surl))
            tmpLog.debug('dst_gpfn = {0}'.format(dst_gpfn))
            tmpLog.debug('lfcdir = {0}'.format(lfcdir))

            # create the destination directory and copy the file; an OS-level
            # failure marks this file as failed (via the exists check below)
            # instead of aborting the whole method, keeping the documented
            # (bool, string) return contract
            try:
                tmpLog.debug('Create if does not exist {0}'.format(lfcdir))
                if not os.path.exists(lfcdir):
                    os.makedirs(lfcdir)

                tmpLog.debug('Copy {0} to {1}'.format(fileSpec.path, dst_gpfn))
                shutil.copyfile(fileSpec.path, dst_gpfn)
            except (IOError, OSError) as exc:
                tmpLog.error('Failed to stage out {0}: {1}'.format(fileSpec.lfn, exc))

            if os.path.exists(dst_gpfn):
                fileSpec.status = 'finished'
            else:
                fileSpec.status = 'failed'
                allChecked = False
                ErrMsg += '{0} '.format(fileSpec.lfn)

            # force update
            fileSpec.force_update('status')

            tmpLog.debug('Status of file {0} is {1}'.format(fileSpec.path, fileSpec.status))

        del jobSpec_full

        tmpLog.debug('done')

        if allChecked:
            return True, ''
        else:
            return False, ErrMsg
Example #3
0
except Exception:
    pass

# Duplicate panda.log.* logger output onto stdout so it shows up on the
# console, reusing each logger's existing formatter. Loggers without handlers
# and the db_proxy logger are deliberately left untouched.
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
    if not loggerName.startswith('panda.log'):
        continue
    if not loggerObj.handlers:
        continue
    if loggerName.split('.')[-1] == 'db_proxy':
        continue
    stdoutHandler = logging.StreamHandler(sys.stdout)
    stdoutHandler.setFormatter(loggerObj.handlers[0].formatter)
    loggerObj.addHandler(stdoutHandler)

# Build the queue configuration map and (re)create the harvester DB tables.
queueConfigMapper = QueueConfigMapper()

proxy = DBProxy()
proxy.make_tables(queueConfigMapper)

# Insert a minimal dummy job (PandaID=1) so later calls have data to work on.
job = JobSpec()
job.PandaID = 1


job.modificationTime = datetime.datetime.now()
proxy.insert_jobs([job])

# Read the job back to verify the insert round-trips through the DB.
newJob = proxy.get_job(1)


# Exercise the communicator with placeholder site/node/CE values,
# requesting a single job with empty extra parameters.
a = CommunicatorPool()
a.get_jobs('siteName', 'nodeName', 'prodSourceLabel', 'computingElement', 1, {})
# Fresh DB proxy plus a communicator-backed cacher executed once in
# single-shot mode (presumably to populate cached data before the job
# loop below — confirm against the full script).
proxy = DBProxy()
communicator = CommunicatorPool()
cacher = Cacher(communicator, single_mode=True)
cacher.run()

# NOTE(review): tmpLog and preparatorCore are defined outside this chunk.
tmpLog.debug("plugin={0}".format(preparatorCore.__class__.__name__))
tmpLog.debug("BasePath from preparator configuration: %s " %
             preparatorCore.basePath)

# get all jobs in table in a preparing substate
#tmpLog.debug('try to get all jobs in a preparing substate')
#jobSpec_list = proxy.get_jobs_in_sub_status('preparing',2000,None,None,None,None,None,None)
# get all jobs
# Fetch either the single requested job (job_id > 0) or every job in the table.
singleJobRequested = job_id > 0
if singleJobRequested:
    tmpLog.debug('try to get job ID - {}'.format(job_id))
    jobSpec_list = [proxy.get_job(job_id)]
else:
    tmpLog.debug('try to get all jobs')
    jobSpec_list = proxy.get_jobs()

tmpLog.debug('got {0} jobs'.format(len(jobSpec_list)))

# loop over all found jobs
if len(jobSpec_list) > 0:
    for jobSpec in jobSpec_list:
        # if user entered a job id check for it
        if job_id > 0:
            if jobSpec.PandaID != job_id:
                continue
            tmpLog.debug(
                ' PandaID = %d status = %s subStatus = %s lockedBy = %s' %
# Fresh DB proxy plus a communicator-backed cacher executed once in
# single-shot mode (presumably to populate cached data before the job
# loop below — confirm against the full script).
proxy = DBProxy()
communicator = CommunicatorPool()
cacher = Cacher(communicator, single_mode=True)
cacher.run()

# NOTE(review): tmpLog and preparatorCore are defined outside this chunk.
tmpLog.debug("plugin={0}".format(preparatorCore.__class__.__name__))
tmpLog.debug("BasePath from preparator configuration: %s " % preparatorCore.basePath)
 
# get all jobs in table in a preparing substate
#tmpLog.debug('try to get all jobs in a preparing substate')
#jobSpec_list = proxy.get_jobs_in_sub_status('preparing',2000,None,None,None,None,None,None)
# get all jobs 
# Fetch the requested job only, or all jobs when no job id was given.
if job_id > 0:
    tmpLog.debug('try to get job ID - {}'.format(job_id))
    jobSpec_list = [proxy.get_job(job_id)]
else:
    tmpLog.debug('try to get all jobs')
    jobSpec_list = proxy.get_jobs()

tmpLog.debug('got {0} jobs'.format(len(jobSpec_list)))


# loop over all found jobs
if len(jobSpec_list) > 0 :
    for jobSpec in jobSpec_list:
        # if user entered a job id check for it
        if job_id > 0 : 
           if jobSpec.PandaID != job_id :
              continue
           tmpLog.debug(' PandaID = %d status = %s subStatus = %s lockedBy = %s' %