Example #1
def addMyArgs(parser):
    # Use arggroup to get these arguments in their own sub-section (of --help)
    parser.defineArgGroup('RAWMerge_tf', 'RAWMerge specific options')
    parser.add_argument('--inputBSFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input'),
                        help='Input BS file(s)', group='RAWMerge_tf')
    parser.add_argument('--outputBS_MRGFile', '--outputBSFile',
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output'),
                        help='Output merged BS file (best if this file ends in ._0001.data, but see allowRename option below)',
                        group='RAWMerge_tf')
    parser.add_argument('--maskEmptyInputs', type=trfArgClasses.argFactory(trfArgClasses.argBool), group='RAWMerge_tf',
                        help='If true then empty BS files are not included in the merge (default True)',
                        default=trfArgClasses.argBool('True'))
    parser.add_argument('--allowRename', type=trfArgClasses.argFactory(trfArgClasses.argBool), group='RAWMerge_tf',
                        help='If true the merged BS file will be forcibly renamed to the value of "outputBSFile" (default True)',
                        default=trfArgClasses.argBool('True'))
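For context, a minimal sketch of how an addMyArgs function like this is typically wired into a transform entry point. The executor choice and its name/exe values below are assumptions modelled on the merge examples elsewhere on this page, not taken from this snippet:

import sys

from PyJobTransforms.transform import transform
from PyJobTransforms.trfExe import bsMergeExecutor
from PyJobTransforms.trfArgs import addAthenaArguments
import PyJobTransforms.trfArgClasses as trfArgClasses

def getTransform():
    # A merge transform built around bsMergeExecutor: it consumes 'BS'
    # inputs and produces the merged 'BS_MRG' output (cf. Example #2)
    trf = transform(executor=bsMergeExecutor(name='RAWFileMerge', exe='file_merging',
                                             inData=['BS'], outData=['BS_MRG']))
    addAthenaArguments(trf.parser)
    addMyArgs(trf.parser)
    return trf

if __name__ == '__main__':
    trf = getTransform()
    trf.parseCmdLineArgs(sys.argv[1:])
    trf.execute()
    trf.generateReport()
    sys.exit(trf.exitCode)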
Example #2
    def execute(self):
        if 'maxFilesPerSubjob' in self.conf.argdict:
            self._hasExecuted = True
            msg.info('Activating job splitting with {0} files per subjob'.format(self.conf.argdict['maxFilesPerSubjob'].value))

            maxFiles = self.conf.argdict['maxFilesPerSubjob'].value
            tmpFiles = self.conf.argdict['inputZeroBiasBSFile'].value
            self._subJobInputs = []
            # Chop the input list into consecutive chunks of at most maxFiles files
            while len(tmpFiles) > 0:
                self._subJobInputs.append(tmpFiles[0:maxFiles])
                del tmpFiles[0:maxFiles]

            msg.info('Job will be split into {0} pieces: {1}'.format(len(self._subJobInputs), self._subJobInputs))

            # Now loop over each subjob
            from PyJobTransforms.trfExe import athenaExecutor, executorConfig, bsMergeExecutor
            jobCounter = 0
            outputBSFileList = []
            for sjInputs in self._subJobInputs:
                sjobArgdict = {'triggerBit': self.conf.argdict['triggerBit'],
                               'InputLbnMapFile': self.conf.argdict['InputLbnMapFile'],
                               'checkEventCount': trfArgClasses.argSubstepBool('False', runarg=False)}

                sjOutput = self.conf.argdict['outputBS_SKIMFile'].value[0] + '_sub{0:03d}.tmp'.format(jobCounter)
                outputBSFileList.append(sjOutput)
                sjDataDictionary = {'BS': trfArgClasses.argBSFile(sjInputs, io='input', type='BS', subtype='BS'),
                                    'BS_SKIM':  trfArgClasses.argBSFile(sjOutput, io='output', type='BS', subtype='BS_SKIM')}

                sjconf = executorConfig(sjobArgdict, sjDataDictionary)
                sjexe = athenaExecutor(name='BSOverlayFilter_{0:03d}'.format(jobCounter), skeletonFile = 'EventOverlayJobTransforms/skeleton.BSOverlayFilter_tf.py',
                                       conf=sjconf, inData=set(['BS']), outData=set(['BS_SKIM']))
                sjexe.doAll(input=set(['BS']), output=set(['BS_SKIM']))
                jobCounter += 1

            # Now do the merging
            mrgArgdict = {'maskEmptyInputs': trfArgClasses.argBool(True, runarg=False),
                          'allowRename': trfArgClasses.argBool(True, runarg=False)}
            mrgDataDictionary = {'BS': trfArgClasses.argBSFile(outputBSFileList, io='input', type='BS', subtype='BS_SKIM'),
                                 'BS_MRG': self.conf.argdict['outputBS_SKIMFile']}
            mrgConf = executorConfig(mrgArgdict, mrgDataDictionary)
            mrgExe = bsMergeExecutor(name = 'RAWFileMerge', exe = 'file_merging', conf=mrgConf,
                                     inData = set(['BS']), outData = set(['BS_MRG']))
            mrgExe.doAll(input=set(['BS']), output=set(['BS_MRG']))

            # If we got here then all must be well, so we have executed successfully
            self._rc = 0

        else:
            # Ordinary run
            super(BSJobSplitterExecutor, self).execute()
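The splitting loop above is plain list chunking; a minimal standalone illustration of the same pattern (no transform machinery, names invented for this sketch):

def chunk(files, n):
    """Split a list of input files into consecutive chunks of at most n files."""
    chunks = []
    while files:
        chunks.append(files[:n])
        files = files[n:]
    return chunks

# chunk(['f1', 'f2', 'f3', 'f4', 'f5'], 2) -> [['f1', 'f2'], ['f3', 'f4'], ['f5']]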
Example #3
    def preExecute(self, input=set(), output=set()):
        """Execute runInfo, set the environment and check the input type"""
        # Execute runInfo.py
        runArgs = self.conf._argdict

        checkFileList(runArgs['input'])
        namelist = [entry['file'] for entry in dsDict['input']]

        self.conf.addToArgdict('inputNames', trfArgClasses.argList(namelist))

        # The project name is the first dot-separated field of the file's basename
        fileName = namelist[0].split('/')[-1]
        projectName = fileName.split('.')[0]

        if 'doRunInfo' not in runArgs:
            self.conf.addToArgdict('doRunInfo', trfArgClasses.argBool(False))
        else:
            if runArgs['doRunInfo']._value:
                import SCT_CalibAlgs.runInfo as runInfo

                print "RunNumber for the runInfo = " + str(RunNumber) + " " + Stream
                runInfo.main(RunNumber, projectName)

        if 'splitNoisyStrip' not in runArgs:
            self.conf.addToArgdict('splitNoisyStrip', trfArgClasses.argInt(0))
        if 'doRunSelector' not in runArgs:
            self.conf.addToArgdict('doRunSelector', trfArgClasses.argBool(False))
            
        # Set STAGE_SVCCLASS
        if SvcClass is not None and SvcClass != '':
            os.environ['STAGE_SVCCLASS'] = SvcClass

        # Check input type
        inputtype = dsDict['input'][0]['dataset'].split('.')[4]
        print "Input type = " + inputtype
        self.conf.addToArgdict('InputType', trfArgClasses.argString(inputtype))


        # check which parts are to be run; argList keeps 'part' iterable as a list of parts
        if 'part' not in runArgs:
            self.conf.addToArgdict('part', trfArgClasses.argList('doNoisyStrip'))

        part = runArgs['part']._value

        for ipart in part:
            if ipart not in ['doNoisyStrip','doNoiseOccupancy','doDeadChip','doDeadStrip','doHV','doBSErrorDB','doRawOccupancy','doEfficiency','doLorentzAngle','doNoisyLB']:
                self._errMsg = 'Argument part=%s does not match any of the possible candidates' % ipart
                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_ARG_ERROR'), self._errMsg)


        # get the prefix
        if 'prefix' not in runArgs:
            self.conf.addToArgdict('prefix', trfArgClasses.argString(''))
        prefix = runArgs['prefix']._value

        # set the job number
        jobnb = ''
        if prefix != '':
            # find the separator for the job number
            sep = prefix.find('._')
            if sep != -1:
                jobnb = prefix[sep+1:]
            elif prefix.rfind('#_') != -1:
                sep = prefix.rfind('#_')
                jobnb = prefix[sep+1:]

            # find the separator for the prefix
            sep = prefix.find('#')
            if sep != -1:
                prefix = prefix[:sep]
            elif prefix.find('._') != -1:
                sep = prefix.rfind('._')
                prefix = prefix[:sep]

            # set the prefix and job number
            prefix += '.' + jobnb
            runArgs['prefix']._value = prefix


        # When ATLAS is NOT in standby but the SCT is, the hitmap root files have 0 events,
        # even though the calibration_SCTNoise stream has 10k+ events.
        # If the noisy strips task is generated, the jobs will fail. A.N. has implemented
        # a condition at T0 level so they won't be defined. However, when runSelector
        # uses AtlRunQuery to look for the runs that have 10k+ events in the
        # calibration_SCTNoise stream, the runs that failed or were skipped will
        # appear as waiting to be uploaded, keeping the rest on hold.

        # We include a protection against those cases: if the summed number of events
        # of the hitmap files is <10k, we don't execute the noisy strips. Rather, we exit
        # with 'success' status, so the job won't fail at T0, and we update the value
        # of the last run uploaded as if this run had been uploaded, to avoid the
        # next run being indefinitely on hold.
        if 'doNoisyStrip' in part and runArgs['splitNoisyStrip']._value == 2 and NumberOfEvents < 10000:
            self._isValidated = True
            self._trf._exitCode = 0
            self._trf._exitMsg = 'Noisy strips trying to read root files with 0 events. Gracefully exit and update lastRun counter to %s' % RunNumber

            updateLastRun(RunNumber)
            self._trf._dataDictionary = {}

            resetTrfSignalHandlers()
            self._trf.generateReport(fast=True)
            sys.exit(0)


        if jobnb != '':
            self.conf.addToArgdict('JobNumber', trfArgClasses.argString(jobnb))

        # get RunNumber and Stream from the dataset name
        if RunNumber != -1:
            self.conf.addToArgdict('RunNumber', trfArgClasses.argInt(RunNumber))
        if Stream != '':
            self.conf.addToArgdict('Stream', trfArgClasses.argString(Stream))

        # Do other prerun actions
        super(SCTCalibExecutor, self).preExecute(input, output)
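The prefix/job-number handling above is easiest to follow in isolation. A hypothetical standalone version of the same splitting logic (function name invented for illustration):

def splitPrefix(prefix):
    """Split a Tier-0 prefix such as 'sct_calib#_0003' or 'sct_calib._0003'
    into (prefix, jobNumber), mirroring the logic in preExecute above."""
    jobnb = ''
    sep = prefix.find('._')
    if sep != -1:
        jobnb = prefix[sep+1:]
    elif prefix.rfind('#_') != -1:
        jobnb = prefix[prefix.rfind('#_')+1:]
    sep = prefix.find('#')
    if sep != -1:
        prefix = prefix[:sep]
    elif prefix.find('._') != -1:
        prefix = prefix[:prefix.rfind('._')]
    return prefix, jobnb

# splitPrefix('sct_calib#_0003') -> ('sct_calib', '_0003')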
Example #4
def addMyArgs(parser):
    # Use arggroup to get these arguments in their own sub-section (of --help)
    parser.defineArgGroup('ESDMerge_tf', 'ESD merge job specific options')
    parser.add_argument('--inputESDFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input'),
                        help='Input ESD file', group='ESDMerge_tf')
    parser.add_argument('--outputESD_MRGFile', '--outputESDFile', 
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='output'),
                        help='Output merged ESD file', group='ESDMerge_tf')

    parser.defineArgGroup('AODMerge_tf', 'AOD merge job specific options')
    parser.add_argument('--inputAODFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', type='aod'),
                        help='Input AOD file', group='AODMerge_tf')
    parser.add_argument('--inputAOD_MRGFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', type='aod'),
                        help='Input (pre-merged) AOD file', group='AODMerge_tf')
    parser.add_argument('--outputAOD_MRGFile', '--outputAODFile', 
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='output', type='aod'),
                        help='Output merged AOD file', group='AODMerge_tf')
    parser.add_argument('--outputTAGFile', 
                        type=trfArgClasses.argFactory(trfArgClasses.argTAGFile, io='output', type='tag'), 
                        help='Output TAG file', group='AODMerge_tf')

    parser.defineArgGroup('TAGMerge_tf', 'TAG merging specific options')
    parser.add_argument('--inputTAGFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argTAGFile, io='input'),
                        help='Input TAG file(s)', group='TAGMerge_tf')
    parser.add_argument('--outputTAG_MRGFile',
                        type=trfArgClasses.argFactory(trfArgClasses.argTAGFile, io='output'),
                        help='Output merged TAG file', group='TAGMerge_tf')

    parser.defineArgGroup('DQHistMerge_tf', 'DQ merge specific options')
    parser.add_argument('--inputHISTFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='input', runarg=True, type='hist'), 
                        help='Input DQ HIST file', group='DQHistMerge_tf')
    parser.add_argument('--inputHIST_AODFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='input', runarg=True, type='hist_aod',
                                                      countable=False), 
                        help='Input DQ AOD step monitoring file', group='DQHistMerge_tf')
    parser.add_argument('--inputHIST_ESDFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='input', runarg=True, type='hist_esd',
                                                      countable=False), 
                        help='Input DQ ESD step monitoring file', group='DQHistMerge_tf')
    parser.add_argument('--outputHIST_MRGFile', '--outputHISTFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, type='hist'), 
                        help='Output DQ monitoring file', group='DQHistMerge_tf')

    parser.defineArgGroup('RDOMerge_tf', 'RDO merge job specific options')
    parser.add_argument('--inputRDOFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argRDOFile, io='input'),
                        help='Input RDO file', group='RDOMerge_tf')
    parser.add_argument('--outputRDO_MRGFile', '--outputRDOFile', 
                        type=trfArgClasses.argFactory(trfArgClasses.argRDOFile, io='output'),
                        help='Output merged RDO file', group='RDOMerge_tf')

    parser.defineArgGroup('RAWMerge_tf', 'RAWMerge specific options')
    parser.add_argument('--inputBSFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input'),
                        help='Input BS file(s)', group='RAWMerge_tf')
    parser.add_argument('--outputBS_MRGFile', '--outputBSFile', 
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output'),
                        help='Output merged BS file (best if this file ends in ._0001.data, but see allowRename option below)', 
                        group='RAWMerge_tf')
    parser.add_argument('--maskEmptyInputs', type=trfArgClasses.argFactory(trfArgClasses.argBool), group='RAWMerge_tf',
                        help='If true then empty BS files are not included in the merge (default True)', 
                        default=trfArgClasses.argBool('True'))
    parser.add_argument('--allowRename', type=trfArgClasses.argFactory(trfArgClasses.argBool), group='RAWMerge_tf',
                        help='If true the merged BS file will be forcibly renamed to the value of "outputBSFile" (default True)',
                        default=trfArgClasses.argBool('True'))

    parser.defineArgGroup('EVNTMerge_tf', 'EVNT merge job specific options')
    parser.add_argument('--inputEVNTFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argEVNTFile, io='input', runarg=True, type='evnt'),
                        help='Input EVNT file', group='EVNTMerge_tf')
    parser.add_argument('--outputEVNT_MRGFile', '--outputEVNTFile', 
                        type=trfArgClasses.argFactory(trfArgClasses.argEVNTFile, io='output', runarg=True, type='evnt'),
                        help='Output merged EVNT file', group='EVNTMerge_tf')
    parser.add_argument('--eventService', type=trfArgClasses.argFactory(trfArgClasses.argBool), metavar = "BOOL",
                        help='Switch AthenaMP to the Event Service configuration', group='EVNTMerge_tf')

    parser.add_argument('--fastPoolMerge', type=trfArgClasses.argFactory(trfArgClasses.argBool),
                        help='Hybrid POOL merging switch (default True)')
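Every argument above routes its type through trfArgClasses.argFactory, which defers constructing the argument object until argparse actually supplies a value. A simplified sketch of that deferred-construction pattern (illustrative only, not the actual PyJobTransforms implementation):

class argFactorySketch(object):
    """Capture an argument class plus keyword arguments now; build the
    instance later, when argparse hands over the command-line string."""
    def __init__(self, genclass, **kwargs):
        self._genclass = genclass
        self._kwargs = kwargs

    def __call__(self, valueString=None):
        if valueString is None:
            return self._genclass(**self._kwargs)
        return self._genclass(valueString, **self._kwargs)

# e.g. factory = argFactorySketch(trfArgClasses.argBool, runarg=True)
#      arg = factory('True')   # builds argBool('True', runarg=True)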
Example #5
def addTriggerArgs(parser):
    # Use arggroup to get these arguments in their own sub-section (of --help)
    parser.defineArgGroup('Trigger', 'Specific options related to the trigger configuration used for reprocessing')
    
    #arguments specific to the trigger transform
    #writeBS is used in literal arguments when running the HLT step in athena (not athenaHLT)
    parser.add_argument('--writeBS', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                          help='Needed if running BSRDO to BS step in athena (default: True)', group='Trigger', default=trfArgClasses.argBool(True, runarg=True))
    #input BS file for the HLT step (the name is just a unique identifier)
    parser.add_argument('--inputBS_RDOFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input', runarg=True, type='bs'),
                        help='Input bytestream file', group='Trigger')
    #if no outputBSFile name is specified, any further steps will know to use tmp.BS
    parser.add_argument('--outputBSFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output', runarg=True, type='bs'),
                        help='Output bytestream file', group='Trigger')
    #select the output stream in the BS file
    #athenaHLT writes all streams into one file, but this can't be processed by standard reco if it contains events in PEB-only streams
    #by default the Main stream is selected, as likely the most needed option, but this can be reverted to All or any other chosen stream
    parser.add_argument('--streamSelection', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
                        help='Select the output stream in the BS file (default: "Main"). Specify "All" to disable splitting (standard reco will fail on any events with only PEB data)', group='Trigger', default=trfArgClasses.argString("Main", runarg=True))
    #HLT output histogram file; if defined, the automatically produced expert-monitoring file is renamed to this
    parser.add_argument('--outputHIST_HLTMONFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
                        help='Output HLTMON file', group='Trigger')    
    #Trigger Configuration String as used in reco Steps
    parser.add_argument('--triggerConfig', nargs='+', metavar='substep=TRIGGERCONFIG',
                        type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True, separator='='),
                        help='Trigger Configuration String. '
                             'N.B. This argument uses EQUALS (=) to separate the substep name from the value.', group='Trigger')
    #precommand for athenaHLT aka -c
    parser.add_argument('--precommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                        help='precommand for athenaHLT aka -c', group='Trigger')
    #postcommand for athenaHLT aka -C
    parser.add_argument('--postcommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                        help='postcommand for athenaHLT aka -C', group='Trigger')
    #For prodsys, to make sure inputBS_RDOFile rather than inputBSFile is used when running the b2r step
    parser.add_argument('--prodSysBSRDO', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        help='For prodsys, to make sure inputBS_RDOFile rather than inputBSFile is used when running the b2r step', group='Trigger')
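The substep=value convention used by --triggerConfig (and other argSubstep arguments) amounts to parsing each value into a per-substep dictionary. A simplified sketch of that convention (illustrative only; the real logic lives in trfArgClasses.argSubstep, and the 'all' default here is an assumption of this sketch):

def parseSubstepArgs(values, separator='='):
    """Turn ['b2r=valueA', 'all=valueB'] into {'b2r': 'valueA', 'all': 'valueB'};
    a bare value with no separator is applied to every substep via 'all'."""
    result = {}
    for v in values:
        if separator in v:
            substep, value = v.split(separator, 1)
        else:
            substep, value = 'all', v
        result[substep] = value
    return result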
Example #6
def addTriggerArgs(parser):
    # Use arggroup to get these arguments in their own sub-section (of --help)
    parser.defineArgGroup('Trigger', 'Specific options related to the trigger configuration used for reprocessing')
    
    #new for the trigger transform
    #now set up to run athenaHLT, so the TODO is to remove testPhysicsV4 and writeBS
    #TODO: testPhysicsV4 needs deleting as it causes double menu loading, but it is left in for now to avoid conflicts on the panda tag page
    parser.add_argument('--testPhysicsV4', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                          help='Please do not use this command, to be deleted', group='Trigger')
    parser.add_argument('--writeBS', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                          help='Needed if running BSRDO to BS step in athena (default: True)', group='Trigger', default=trfArgClasses.argBool(True, runarg=True))
    parser.add_argument('--inputBS_RDOFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input', runarg=True, type='bs'),
                        help='Input bytestream file', group='Trigger')
    #if no outputBSFile name is specified, any further steps will know to use tmp.BS
    parser.add_argument('--outputBSFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output', runarg=True, type='bs'),
                        help='Output bytestream file', group='Trigger')
    parser.add_argument('--outputHIST_HLTMONFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
                        help='Output HLTMON file', group='Trigger')    
    #NTUP_TRIG is added as it is not available in ATLASP1HLT, but it is available in the reco release,
    #hence it can be used later in an ATLASP1HLT job if the release is switched
    parser.add_argument('--outputNTUP_TRIGFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                        help='D3PD output NTUP_TRIG file (can be made in substeps e2d,a2d)', group='Trigger')
    #NTUP_COST is added as it is not available in ATLASP1HLT, but it is available in the reco release,
    #hence it can be used later in an ATLASP1HLT job if the release is switched
    parser.add_argument('--outputNTUP_TRIGCOSTFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                        help='D3PD output NTUP_TRIGCOST file', group='Trigger')
    #NTUP_RATE is added as it is not available in ATLASP1HLT, but it is available in the reco release,
    #hence it can be used later in an ATLASP1HLT job if the release is switched
    parser.add_argument('--outputNTUP_TRIGRATEFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                        help='D3PD output NTUP_TRIGRATE file', group='Trigger')
    #NTUP_TRIGEBWGHT is added as it is not available in ATLASP1HLT, but it is available in the reco release,
    #hence it can be used later in an ATLASP1HLT job if the release is switched
    parser.add_argument('--outputNTUP_TRIGEBWGHTFile', nargs='+', 
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                        help='D3PD output NTUP_TRIGEBWGHT file', group='Trigger')
    parser.add_argument('--triggerConfig', nargs='+', metavar='substep=TRIGGERCONFIG',
                        type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True, separator='='),
                        help='Trigger Configuration String. '
                             'N.B. This argument uses EQUALS (=) to separate the substep name from the value.', group='Trigger')
    parser.add_argument('--dumpOptions', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        help='Only for testing: '
                        'dump to stdout the options athenaHLT has received. '
                        'N.B. option consistency is not checked.', group='Trigger')
    parser.add_argument('--precommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                        help='precommand for athenaHLT aka -c', group='Trigger')
    parser.add_argument('--postcommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                        help='postcommand for athenaHLT aka -C', group='Trigger')
    parser.add_argument('--eventmodifier', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                        help='event-modifier for athenaHLT aka -Z', group='Trigger')
    parser.add_argument('--prodSysBSRDO', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        help='For prodsys, to make sure inputBS_RDOFile rather than inputBSFile is used when running the b2r step', group='Trigger')