Example #1
0
def checkFileList(filelist):
    """Converts list of files of type ds#filename into a list of filenames, meanwhile
    setting ds value. If check is true it also checks the existence of the files."""
    # First check if type is list
    
    if not isinstance(filelist,list):
        filelist=[filelist]

    for i,ifile in enumerate(filelist):
        # extract ds,runnumber and svcclass
        filename=getDsFileName(ifile)
        # pass file check if file is on castor
        if filename.find('/castor',0,8) != -1:
            pass
        elif not fileutil.exists(filename):
            found = fileutil.exists_suffix_number(filename + '.')
            if not found:
                errMsg = filename + ' not found'
                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_INPUT_FILE_VALIDATION_FAIL'), errMsg)
            if found != filename:
                filename = found
        # correct filename in list
        filelist[i]=filename
    return filelist
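
A minimal usage sketch (hypothetical input names; getDsFileName is assumed to strip a leading 'dataset#' tag and record the dataset / service class):

# Hypothetical usage: 'dataset#filename' entries are reduced to plain filenames;
# paths beginning with /castor skip the local existence check.
inputs = ['myDataset#/castor/cern.ch/user/s/sctcalib/SCTHitMaps.0001.root']
print checkFileList(inputs)
# -> ['/castor/cern.ch/user/s/sctcalib/SCTHitMaps.0001.root']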
Example #2
0
    def preExecute(self, input=set(), output=set()):

        """ Execute runInfo, set environment and check inputtype"""
        # Execute runInfo.py
        runArgs=self.conf._argdict

        checkFileList(runArgs['input'])
        namelist=[]
        for entry in dsDict['input']:
            namelist.append(entry['file'])
            
        self.conf.addToArgdict('inputNames', trfArgClasses.argList(namelist))

        fileName = namelist[0].split('/')[-1]
        projectName = str(fileName.split('.')[0])


        if 'doRunInfo' not in runArgs:
            self.conf.addToArgdict('doRunInfo', trfArgClasses.argBool(False))
        else:
            if runArgs['doRunInfo']._value:
                import SCT_CalibAlgs.runInfo as runInfo

                print "RunNumber for the runInfo = " + str(RunNumber) + " " + Stream
                runInfo.main(RunNumber, projectName)

        if 'splitNoisyStrip' not in runArgs:
            self.conf.addToArgdict('splitNoisyStrip', trfArgClasses.argInt(0))
        if 'doRunSelector' not in runArgs:
            self.conf.addToArgdict('doRunSelector', trfArgClasses.argBool(False))

            
            
        # Set STAGE_SVCCLASS
        if SvcClass is not None and SvcClass != '':
            os.environ['STAGE_SVCCLASS']=SvcClass

        # Check input type
        inputtype=dsDict['input'][0]['dataset'].split('.')[4]
        print "Input type = " + inputtype
        self.conf.addToArgdict('InputType', trfArgClasses.argString(inputtype))


        # check which parts to be run
        if 'part' not in runArgs:
            self.conf.addToArgdict('part', trfArgClasses.argString('doNoisyStrip'))

        part=runArgs['part']._value


        

        for ipart in part:
            if ipart not in ['doNoisyStrip','doNoiseOccupancy','doDeadChip','doDeadStrip','doHV','doBSErrorDB','doRawOccupancy','doEfficiency','doLorentzAngle','doNoisyLB']:
                self._errMsg = 'Argument part=%s does not match any of the possible candidates' % ipart
                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_ARG_ERROR'), self._errMsg)


        # get prefix
        if 'prefix' not in runArgs:
            self.conf.addToArgdict('prefix', trfArgClasses.argString(''))
        

        prefix=runArgs['prefix']._value

            
        # set job number
        jobnb=''
        # find separator for jobnumber
        if prefix != '':
            sep=prefix.find('._')
            if ( sep != -1 ) :
                jobnb=prefix[sep+1:]
            elif ( prefix.rfind('#_') != -1 ):
                sep=prefix.rfind('#_')
                jobnb=prefix[sep+1:]
            
            # find separator for prefix
            sep=prefix.find('#')
            if (sep != -1) :
                prefix=prefix[:sep]
            elif (prefix.find('._') != -1):
                sep=prefix.rfind('._')
                prefix=prefix[:sep]

            # set prefix and jobnumber
            prefix+='.'+jobnb
            runArgs['prefix']._value = prefix


        # When ATLAS is NOT in standby but the SCT is, the hitmap root files have 0 events,
        # even though the calibration_SCTNoise stream has 10k+ events.
        # If the noisy strips task is generated, the jobs will fail. A.N has implemented
        # a condition at T0 level so they won't be defined. However,
        # when runSelector uses AtlRunQuery to look for the runs that have 10k+ events
        # in the calibration_SCTNoise stream, those runs that failed or were skipped
        # will appear as waiting to be uploaded, keeping the rest on hold.

        # We include a protection against those cases: if the summed number of events
        # of the hitmap files is <10k, we don't execute the noisy strips. Rather, we exit
        # with 'success' status, so the job won't fail at T0, and update the value
        # of the last run uploaded as if this run had been uploaded, to avoid the
        # next run being indefinitely on hold.
        # print 'Number of events: ', NumberOfEvents
        if 'doNoisyStrip' in part and runArgs['splitNoisyStrip']._value==2 and NumberOfEvents<10000:
            self._isValidated = True
            self._trf._exitCode = 0
            self._trf._exitMsg = 'Noisy strips trying to read root files with 0 events. Gracefully exit and update lastRun counter to %s' %(RunNumber)

            updateLastRun(RunNumber)
            emptyDic = {}
            self._trf._dataDictionary = emptyDic

            resetTrfSignalHandlers()
            self._trf.generateReport(fast=True)
            sys.exit(0)

#                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), self._errMsg)


        if jobnb != '':
            self.conf.addToArgdict('JobNumber', trfArgClasses.argString(jobnb))

        # get RunNumber from datasetName
        if RunNumber != -1:
            self.conf.addToArgdict('RunNumber', trfArgClasses.argInt(RunNumber))
        if Stream != '':
            self.conf.addToArgdict('Stream', trfArgClasses.argString(Stream))

        # Do other prerun actions
        super(SCTCalibExecutor, self).preExecute(input,output)
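
The prefix / job-number block above mutates runArgs in place, which makes the splitting rule hard to see; a standalone sketch of the same logic (splitPrefix is a hypothetical helper, not part of the transform) behaves as follows:

# Hypothetical sketch of the prefix/job-number splitting applied above.
def splitPrefix(prefix):
    jobnb = ''
    # job number follows a '._' or '#_' separator
    sep = prefix.find('._')
    if sep != -1:
        jobnb = prefix[sep+1:]
    elif prefix.rfind('#_') != -1:
        jobnb = prefix[prefix.rfind('#_')+1:]
    # prefix ends at the '#' or '._' separator
    if prefix.find('#') != -1:
        prefix = prefix[:prefix.find('#')]
    elif prefix.find('._') != -1:
        prefix = prefix[:prefix.rfind('._')]
    return prefix + '.' + jobnb

print splitPrefix('sct_calib#_0001')   # -> 'sct_calib._0001'
print splitPrefix('sct_calib._0001')   # -> 'sct_calib._0001'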
Example #3
0
class SCTCalibExecutor( athenaExecutor ):
    def __init__(self, skeleton):
        athenaExecutor.__init__(self,
                                name = 'sctcalib',
                                skeletonFile='/afs/cern.ch/user/s/sctcalib/testarea/latest/athena/InnerDetector/InDetCalibAlgs/SCT_CalibAlgs/share/skeleton.sct_calib.py')
#                                skeletonFile='/afs/cern.ch/work/c/csander/sct/testarea/AtlasOffline/athena/InnerDetector/InDetCalibAlgs/SCT_CalibAlgs/share/skeleton.sct_calib.py')


    def preExecute(self, input=set(), output=set()):

        """ Execute runInfo, set environment and check inputtype"""
        # Execute runInfo.py
        runArgs=self.conf._argdict

        checkFileList(runArgs['input'])
        namelist=[]
        for entry in dsDict['input']:
            namelist.append(entry['file'])
            
        self.conf.addToArgdict('inputNames', trfArgClasses.argList(namelist))

        fileName = namelist[0].split('/')[-1]
        projectName = str(fileName.split('.')[0])


        if 'doRunInfo' not in runArgs:
            self.conf.addToArgdict('doRunInfo', trfArgClasses.argBool(False))
        else:
            if runArgs['doRunInfo']._value:
                import SCT_CalibAlgs.runInfo as runInfo

                print "RunNumber for the runInfo = " + str(RunNumber) + " " + Stream
                runInfo.main(RunNumber, projectName)

        if 'splitNoisyStrip' not in runArgs:
            self.conf.addToArgdict('splitNoisyStrip', trfArgClasses.argInt(0))
        if 'doRunSelector' not in runArgs:
            self.conf.addToArgdict('doRunSelector', trfArgClasses.argBool(False))

            
            
        # Set STAGE_SVCCLASS
        if SvcClass is not None and SvcClass != '':
            os.environ['STAGE_SVCCLASS']=SvcClass

        # Check input type
        inputtype=dsDict['input'][0]['dataset'].split('.')[4]
        print "Input type = " + inputtype
        self.conf.addToArgdict('InputType', trfArgClasses.argString(inputtype))


        # check which parts to be run
        if 'part' not in runArgs:
            self.conf.addToArgdict('part', trfArgClasses.argString('doNoisyStrip'))

        part=runArgs['part']._value


        

        for ipart in part:
            if ipart not in ['doNoisyStrip','doNoiseOccupancy','doDeadChip','doDeadStrip','doHV','doBSErrorDB','doRawOccupancy','doEfficiency','doLorentzAngle','doNoisyLB']:
                self._errMsg = 'Argument part=%s does not match any of the possible candidates' % ipart
                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_ARG_ERROR'), self._errMsg)


        # get prefix
        if 'prefix' not in runArgs:
            self.conf.addToArgdict('prefix', trfArgClasses.argString(''))
        

        prefix=runArgs['prefix']._value

            
        # set job number
        jobnb=''
        # find separator for jobnumber
        if prefix != '':
            sep=prefix.find('._')
            if ( sep != -1 ) :
                jobnb=prefix[sep+1:]
            elif ( prefix.rfind('#_') != -1 ):
                sep=prefix.rfind('#_')
                jobnb=prefix[sep+1:]
            
            # find separator for prefix
            sep=prefix.find('#')
            if (sep != -1) :
                prefix=prefix[:sep]
            elif (prefix.find('._') != -1):
                sep=prefix.rfind('._')
                prefix=prefix[:sep]

            # set prefix and jobnumber
            prefix+='.'+jobnb
            runArgs['prefix']._value = prefix


        # When ATLAS is NOT in standby but the SCT is, the hitmap root files have 0 events,
        # even though the calibration_SCTNoise stream has 10k+ events.
        # If the noisy strips task is generated, the jobs will fail. A.N has implemented
        # a condition at T0 level so they won't be defined. However,
        # when runSelector uses AtlRunQuery to look for the runs that have 10k+ events
        # in the calibration_SCTNoise stream, those runs that failed or were skipped
        # will appear as waiting to be uploaded, keeping the rest on hold.

        # We include a protection against those cases: if the summed number of events
        # of the hitmap files is <10k, we don't execute the noisy strips. Rather, we exit
        # with 'success' status, so the job won't fail at T0, and update the value
        # of the last run uploaded as if this run had been uploaded, to avoid the
        # next run being indefinitely on hold.
        # print 'Number of events: ', NumberOfEvents
        if 'doNoisyStrip' in part and runArgs['splitNoisyStrip']._value==2 and NumberOfEvents<10000:
            self._isValidated = True
            self._trf._exitCode = 0
            self._trf._exitMsg = 'Noisy strips trying to read root files with 0 events. Gracefully exit and update lastRun counter to %s' %(RunNumber)

            updateLastRun(RunNumber)
            emptyDic = {}
            self._trf._dataDictionary = emptyDic

            resetTrfSignalHandlers()
            self._trf.generateReport(fast=True)
            sys.exit(0)

#                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), self._errMsg)


        if jobnb != '':
            self.conf.addToArgdict('JobNumber', trfArgClasses.argString(jobnb))

        # get RunNumber from datasetName
        if RunNumber != -1:
            self.conf.addToArgdict('RunNumber', trfArgClasses.argInt(RunNumber))
        if Stream != '':
            self.conf.addToArgdict('Stream', trfArgClasses.argString(Stream))

        # Do other prerun actions
        super(SCTCalibExecutor, self).preExecute(input,output)
        




    def execute(self):

        runArgs=self.conf._argdict
        # Check the run for criteria in runSelector
        if runArgs['doRunSelector']._value:
            import SCT_CalibAlgs.runSelector as runSelector
            part=runArgs['part']._value
            if runArgs['splitNoisyStrip']._value == 1:
                skipQueue = 1
            else:
                skipQueue = 0
            checkRun=runSelector.main(RunNumber,part,skipQueue,Stream)
            if not checkRun:

                print "Run %s didn't pass run selection criteria. It will not be processed and no output will be generated. Finish execution and exit gracefully" %(RunNumber)
                #No processing->no output
                #Need an empty dictionary so the job won't fail at T0
                #when trying to copy output files
                emptyDic = {}
                self._trf._dataDictionary = emptyDic

                    
                self._isValidated = True
                self._trf._exitMsg = 'Did not pass run selection criteria. Finish execution and exit gracefully.'
                self._trf._exitCode = 0
                resetTrfSignalHandlers()
                self._trf.generateReport(fast=True)
                sys.exit(0)


        rootHitmapFiles = []
        rootLbFiles = []
        for inputFileName in runArgs['input'] :
            if inputFileName.find("SCTHitMaps") != -1:
                rootHitmapFiles.append(inputFileName)
            if inputFileName.find("SCTLB") != -1:
                rootLbFiles.append(inputFileName)
           

        if runArgs['splitNoisyStrip']._value ==2 :
            if len(rootLbFiles) == len(rootHitmapFiles) and len(rootHitmapFiles) > 0 :

                fileutil.remove('SCTHitMaps.root')
                fileutil.remove('SCTLB.root')

                cmd = "cp -v $ROOTSYS/bin/hadd . \n"
                cmd += "hadd SCTHitMaps.root " 
                for inputFileName in rootHitmapFiles :
                    cmd += "%s " %(inputFileName)
                cmd += "\n"
    #            cmd += " >> /dev/null 2>&1 \n"
                cmd += "hadd SCTLB.root "
                for inputFileName in rootLbFiles :
                    cmd += "%s " %(inputFileName)
                cmd += "\n"
#            cmd += " >> /dev/null 2>&1 \n"
            
                print cmd
                self._echologger.info('Merging Hitmap and LB files!')
                retcode=1
                try:
                    retcode = os.system(cmd)
                except OSError:
                    retcode = 1
                if retcode == 0:
                    self._echologger.info('Root merge successful')
                else:
                    self._echologger.error("FAILED to merge root files")
        


        super(SCTCalibExecutor, self).execute()

        if self._rc != 0:
            try:

                if 'less than the required minimum number of events' in open('log.sctcalib').read():
                    self._errMsg = 'Successful but warrants further investigation'
                    raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_UNKNOWN'), self._errMsg)
            except IOError:
                # log file missing or unreadable; nothing further to check
                pass
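
The merge step above shells out via a concatenated os.system string; a minimal alternative sketch using subprocess (hadd from ROOT is assumed to be on the PATH, file names are hypothetical) would be:

# Hypothetical sketch of the hitmap/LB merge using subprocess instead of os.system.
import subprocess

def mergeRootFiles(target, inputs):
    # hadd writes all histograms from the input files into a single target file;
    # the return code mirrors the retcode check done in execute()
    return subprocess.call(['hadd', target] + list(inputs))

rootHitmapFiles = ['SCTHitMaps.0001.root', 'SCTHitMaps.0002.root']   # hypothetical inputs
if mergeRootFiles('SCTHitMaps.root', rootHitmapFiles) == 0:
    print 'Root merge successful'
else:
    print 'FAILED to merge root files'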